---
 gcc/pdbout.c | 1272 +++++++++++++++++++++++++++++++++++++++++++++++++-
 gcc/pdbout.h |  969 ++++++++++++++++++++++++++++++++++++++
 2 files changed, 2238 insertions(+), 3 deletions(-)

diff --git a/gcc/pdbout.c b/gcc/pdbout.c
index 17011134d7a..2f5b52b6fc3 100644
--- a/gcc/pdbout.c
+++ b/gcc/pdbout.c
@@ -32,6 +32,12 @@
 #include "function.h"
 #include "output.h"
 #include "target.h"
+#include "rtl.h"
+#include "insn-config.h"
+#include "reload.h"
+#include "cp/cp-tree.h"
+#include "common/common-target.h"
+#include "except.h"
 
 #define FUNC_BEGIN_LABEL       ".Lstartfunc"
 #define FUNC_END_LABEL         ".Lendfunc"
@@ -44,10 +50,16 @@ static void pdbout_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
 static void pdbout_finish (const char *filename);
 static void pdbout_begin_function (tree func);
 static void pdbout_late_global_decl (tree var);
+static void pdbout_function_decl (tree decl);
+static void pdbout_begin_block (unsigned int line ATTRIBUTE_UNUSED,
+                               unsigned int blocknum);
+static void pdbout_end_block (unsigned int line ATTRIBUTE_UNUSED,
+                             unsigned int blocknum);
 
 static struct pdb_type *find_type (tree t);
 
 static struct pdb_func *funcs = NULL, *cur_func = NULL;
+static struct pdb_block *cur_block = NULL;
 static struct pdb_global_var *global_vars = NULL;
 static struct pdb_type *types = NULL, *last_type = NULL;
 static hash_table <pdb_type_tree_hasher> tree_hash_table (31);
@@ -73,8 +85,8 @@ const struct gcc_debug_hooks pdb_debug_hooks = {
   debug_nothing_int_charstar,  /* undef */
   debug_nothing_int_charstar,  /* start_source_file */
   debug_nothing_int,           /* end_source_file */
-  debug_nothing_int_int,       /* begin_block */
-  debug_nothing_int_int,       /* end_block */
+  pdbout_begin_block,
+  pdbout_end_block,
   debug_true_const_tree,       /* ignore_block */
   debug_nothing_int_int_charstar_int_bool,     /* source_line */
   pdbout_begin_prologue,
@@ -84,7 +96,7 @@ const struct gcc_debug_hooks pdb_debug_hooks = {
   pdbout_begin_function,
   debug_nothing_int,           /* end_function */
   debug_nothing_tree,          /* register_main_translation_unit */
-  debug_nothing_tree,          /* function_decl */
+  pdbout_function_decl,
   debug_nothing_tree,          /* early_global_decl */
   pdbout_late_global_decl,
   debug_nothing_tree_int,      /* type_decl */
@@ -122,6 +134,198 @@ pdbout_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
   fprintf (asm_out_file, FUNC_END_LABEL "%u:\n", current_function_funcdef_no);
 }
 
+/* Output the information as to where a local variable can be found. */
+static void
+pdbout_local_variable (struct pdb_local_var *v)
+{
+  uint16_t len, align;
+  size_t name_len = strlen (v->name);
+
+  switch (v->var_type)
+    {
+    case pdb_local_var_regrel:
+      if (v->reg == CV_X86_EBP) // ebp is a special case
+       {
+         len = 13 + name_len;
+
+         if (len % 4 != 0)
+           {
+             align = 4 - (len % 4);
+             len += 4 - (len % 4);
+           }
+         else
+           align = 0;
+
+         /* Output BPRELSYM32 struct */
+
+         fprintf (asm_out_file, "\t.short\t0x%x\n",
+                  (uint16_t) (len - sizeof (uint16_t)));       // reclen
+         fprintf (asm_out_file, "\t.short\t0x%x\n", S_BPREL32);
+         fprintf (asm_out_file, "\t.long\t0x%x\n", v->offset);
+         fprintf (asm_out_file, "\t.long\t0x%x\n",
+                  v->type ? v->type->id : 0);
+
+         ASM_OUTPUT_ASCII (asm_out_file, v->name, name_len + 1);
+       }
+      else
+       {
+         len = 15 + name_len;
+
+         if (len % 4 != 0)
+           {
+             align = 4 - (len % 4);
+             len += 4 - (len % 4);
+           }
+         else
+           align = 0;
+
+         /* Output REGREL32 struct */
+
+         fprintf (asm_out_file, "\t.short\t0x%x\n",
+                  (uint16_t) (len - sizeof (uint16_t)));       // reclen
+         fprintf (asm_out_file, "\t.short\t0x%x\n", S_REGREL32);
+         fprintf (asm_out_file, "\t.long\t0x%x\n", v->offset);
+         fprintf (asm_out_file, "\t.long\t0x%x\n",
+                  v->type ? v->type->id : 0);
+         fprintf (asm_out_file, "\t.short\t0x%x\n", v->reg);
+
+         ASM_OUTPUT_ASCII (asm_out_file, v->name, name_len + 1);
+       }
+
+      for (unsigned int i = 0; i < align; i++)
+       {
+         fprintf (asm_out_file, "\t.byte\t0\n");
+       }
+      break;
+
+    case pdb_local_var_register:
+      len = 11 + name_len;
+
+      if (len % 4 != 0)
+       {
+         align = 4 - (len % 4);
+         len += 4 - (len % 4);
+       }
+      else
+       align = 0;
+
+      /* Output REGSYM struct */
+
+      fprintf (asm_out_file, "\t.short\t0x%x\n",
+              (uint16_t) (len - sizeof (uint16_t)));   // reclen
+      fprintf (asm_out_file, "\t.short\t0x%x\n", S_REGISTER);
+      fprintf (asm_out_file, "\t.long\t0x%x\n",
+              v->type ? v->type->id : 0);
+      fprintf (asm_out_file, "\t.short\t0x%x\n", v->reg);
+
+      ASM_OUTPUT_ASCII (asm_out_file, v->name, name_len + 1);
+
+      for (unsigned int i = 0; i < align; i++)
+       {
+         fprintf (asm_out_file, "\t.byte\t0\n");
+       }
+      break;
+
+    case pdb_local_var_symbol:
+      len = 15 + name_len;
+
+      if (len % 4 != 0)
+       {
+         align = 4 - (len % 4);
+         len += 4 - (len % 4);
+       }
+      else
+       align = 0;
+
+      /* Output DATASYM32 struct */
+
+      fprintf (asm_out_file, "\t.short\t0x%x\n",
+              (uint16_t) (len - sizeof (uint16_t)));   // reclen
+      fprintf (asm_out_file, "\t.short\t0x%x\n", S_LDATA32);
+      fprintf (asm_out_file, "\t.short\t0x%x\n",
+              v->type ? v->type->id : 0);
+      fprintf (asm_out_file, "\t.short\t0\n");
+
+      fprintf (asm_out_file, "\t.secrel32\t"); // offset
+      ASM_OUTPUT_LABELREF (asm_out_file, v->symbol);
+      fprintf (asm_out_file, "\n");
+
+      fprintf (asm_out_file, "\t.secidx\t");   // section
+      ASM_OUTPUT_LABELREF (asm_out_file, v->symbol);
+      fprintf (asm_out_file, "\n");
+
+      ASM_OUTPUT_ASCII (asm_out_file, v->name, name_len + 1);
+
+      for (unsigned int i = 0; i < align; i++)
+       {
+         fprintf (asm_out_file, "\t.byte\t0\n");
+       }
+      break;
+
+    default:
+      break;
+    }
+}
+
+/* Output BLOCKSYM32 structure, describing block-level scope
+ * for the purpose of local variables. */
+static void
+pdbout_block (struct pdb_block *block, struct pdb_func *func)
+{
+  struct pdb_local_var *local_var = func->local_vars;
+
+  while (local_var)
+    {
+      if (local_var->block_num == block->num)
+       pdbout_local_variable (local_var);
+
+      local_var = local_var->next;
+    }
+
+  while (block->children)
+    {
+      struct pdb_block *n = block->children->next;
+
+      fprintf (asm_out_file, ".Lcvblockstart%u:\n", block->children->num);
+      fprintf (asm_out_file, "\t.short\t0x16\n");      // reclen
+      fprintf (asm_out_file, "\t.short\t0x%x\n", S_BLOCK32);
+
+      // pParent
+      if (block->num != 0)
+       {
+         fprintf (asm_out_file, "\t.long\t[.Lcvblockstart%u]-[.debug$S]\n",
+                  block->num);
+       }
+      else
+       {
+         fprintf (asm_out_file, "\t.long\t[.Lcvprocstart%u]-[.debug$S]\n",
+                  func->num);
+       }
+
+      fprintf (asm_out_file, "\t.long\t[.Lcvblockend%u]-[.debug$S]\n",
+              block->children->num);   // pEnd
+      fprintf (asm_out_file, "\t.long\t[.Lblockend%u]-[.Lblockstart%u]\n",
+              block->children->num, block->children->num);     // length
+      fprintf (asm_out_file, "\t.secrel32\t.Lblockstart%u\n",
+              block->children->num);   // offset
+      fprintf (asm_out_file, "\t.secidx\t.Lblockstart%u\n",
+              block->children->num);   // section
+
+      fprintf (asm_out_file, "\t.byte\t0\n");  // name (zero-length string)
+      fprintf (asm_out_file, "\t.byte\t0\n");  // padding
+
+      pdbout_block (block->children, func);
+
+      fprintf (asm_out_file, ".Lcvblockend%u:\n", block->children->num);
+      fprintf (asm_out_file, "\t.short\t0x2\n");
+      fprintf (asm_out_file, "\t.short\t0x%x\n", S_END);
+
+      free (block->children);
+
+      block->children = n;
+    }
+}
+
 /* Output PROCSYM32 structure, which describes a global function (S_GPROC32)
  * or a local (i.e. static) one (S_LPROC32). */
 static void
@@ -174,12 +378,26 @@ pdbout_proc32 (struct pdb_func *func)
       fprintf (asm_out_file, "\t.byte\t0\n");
     }
 
+  pdbout_block (&func->block, func);
+
   // end procedure
 
   fprintf (asm_out_file, ".Lcvprocend%u:\n", func->num);
 
   fprintf (asm_out_file, "\t.short\t0x2\n");
   fprintf (asm_out_file, "\t.short\t0x%x\n", S_END);
+
+  while (func->local_vars)
+    {
+      struct pdb_local_var *n = func->local_vars->next;
+
+      if (func->local_vars->symbol)
+       free (func->local_vars->symbol);
+
+      free (func->local_vars);
+
+      func->local_vars = n;
+    }
 }
 
 /* Output DATASYM32 structure, describing a global variable: either
@@ -312,10 +530,17 @@ pdbout_begin_function (tree func)
   f->num = current_function_funcdef_no;
   f->public_flag = TREE_PUBLIC (func);
   f->type = find_type (TREE_TYPE (func));
+  f->local_vars = f->last_local_var = NULL;
+
+  f->block.next = NULL;
+  f->block.parent = NULL;
+  f->block.num = 0;
+  f->block.children = f->block.last_child = NULL;
 
   funcs = f;
 
   cur_func = f;
+  cur_block = &f->block;
 }
 
 /* We've been passed a late global declaration, i.e. a global variable -
@@ -624,3 +849,1044 @@ find_type (tree t)
 
     return NULL;
 }
+
+/* Given an x86 gcc register no., return the CodeView equivalent. */
+static enum pdb_x86_register
+map_register_no_x86 (unsigned int regno, machine_mode mode)
+{
+  switch (mode)
+    {
+    case E_SImode:
+      switch (regno)
+       {
+       case AX_REG:
+         return CV_X86_EAX;
+
+       case DX_REG:
+         return CV_X86_EDX;
+
+       case CX_REG:
+         return CV_X86_ECX;
+
+       case BX_REG:
+         return CV_X86_EBX;
+
+       case SI_REG:
+         return CV_X86_ESI;
+
+       case DI_REG:
+         return CV_X86_EDI;
+
+       case BP_REG:
+         return CV_X86_EBP;
+
+       case SP_REG:
+         return CV_X86_ESP;
+
+       case FLAGS_REG:
+         return CV_X86_EFLAGS;
+       }
+
+      break;
+
+    case E_HImode:
+      switch (regno)
+       {
+       case AX_REG:
+         return CV_X86_AX;
+
+       case DX_REG:
+         return CV_X86_DX;
+
+       case CX_REG:
+         return CV_X86_CX;
+
+       case BX_REG:
+         return CV_X86_BX;
+
+       case SI_REG:
+         return CV_X86_SI;
+
+       case DI_REG:
+         return CV_X86_DI;
+
+       case BP_REG:
+         return CV_X86_BP;
+
+       case SP_REG:
+         return CV_X86_SP;
+
+       case FLAGS_REG:
+         return CV_X86_FLAGS;
+       }
+
+      break;
+
+    case E_QImode:
+      switch (regno)
+       {
+       case AX_REG:
+         return CV_X86_AL;
+
+       case DX_REG:
+         return CV_X86_DL;
+
+       case CX_REG:
+         return CV_X86_CL;
+
+       case BX_REG:
+         return CV_X86_BL;
+       }
+
+      break;
+
+    case E_SFmode:
+    case E_DFmode:
+      switch (regno)
+       {
+       case XMM0_REG:
+         return CV_X86_XMM0;
+
+       case XMM1_REG:
+         return CV_X86_XMM1;
+
+       case XMM2_REG:
+         return CV_X86_XMM2;
+
+       case XMM3_REG:
+         return CV_X86_XMM3;
+
+       case XMM4_REG:
+         return CV_X86_XMM4;
+
+       case XMM5_REG:
+         return CV_X86_XMM5;
+
+       case XMM6_REG:
+         return CV_X86_XMM6;
+
+       case XMM7_REG:
+         return CV_X86_XMM7;
+
+       case ST0_REG:
+         return CV_X86_ST0;
+
+       case ST1_REG:
+         return CV_X86_ST1;
+
+       case ST2_REG:
+         return CV_X86_ST2;
+
+       case ST3_REG:
+         return CV_X86_ST3;
+
+       case ST4_REG:
+         return CV_X86_ST4;
+
+       case ST5_REG:
+         return CV_X86_ST5;
+
+       case ST6_REG:
+         return CV_X86_ST6;
+
+       case ST7_REG:
+         return CV_X86_ST7;
+       }
+
+      break;
+
+    case E_DImode:
+      /* Suppress warning for 64-bit pseudo-registers on x86, e.g. an 8-byte
+       * struct put in ecx:edx. Not representable with CodeView? */
+      return CV_X86_NONE;
+
+    default:
+      break;
+    }
+
+  warning (0, "could not map x86 register %u, mode %u to CodeView constant",
+          regno, mode);
+
+  return CV_X86_NONE;
+}
+
+/* Given an amd64 gcc register no., return the CodeView equivalent. */
+static enum pdb_amd64_register
+map_register_no_amd64 (unsigned int regno, machine_mode mode)
+{
+  switch (mode)
+    {
+    case E_SImode:
+    case E_SFmode:
+    case E_SDmode:
+      switch (regno)
+       {
+       case AX_REG:
+         return CV_AMD64_EAX;
+
+       case DX_REG:
+         return CV_AMD64_EDX;
+
+       case CX_REG:
+         return CV_AMD64_ECX;
+
+       case BX_REG:
+         return CV_AMD64_EBX;
+
+       case SI_REG:
+         return CV_AMD64_ESI;
+
+       case DI_REG:
+         return CV_AMD64_EDI;
+
+       case BP_REG:
+         return CV_AMD64_EBP;
+
+       case SP_REG:
+         return CV_AMD64_ESP;
+
+       case FLAGS_REG:
+         return CV_AMD64_EFLAGS;
+
+       case R8_REG:
+         return CV_AMD64_R8D;
+
+       case R9_REG:
+         return CV_AMD64_R9D;
+
+       case R10_REG:
+         return CV_AMD64_R10D;
+
+       case R11_REG:
+         return CV_AMD64_R11D;
+
+       case R12_REG:
+         return CV_AMD64_R12D;
+
+       case R13_REG:
+         return CV_AMD64_R13D;
+
+       case R14_REG:
+         return CV_AMD64_R14D;
+
+       case R15_REG:
+         return CV_AMD64_R15D;
+
+       case XMM0_REG:
+         return CV_AMD64_XMM0_0;
+
+       case XMM1_REG:
+         return CV_AMD64_XMM1_0;
+
+       case XMM2_REG:
+         return CV_AMD64_XMM2_0;
+
+       case XMM3_REG:
+         return CV_AMD64_XMM3_0;
+
+       case XMM4_REG:
+         return CV_AMD64_XMM4_0;
+
+       case XMM5_REG:
+         return CV_AMD64_XMM5_0;
+
+       case XMM6_REG:
+         return CV_AMD64_XMM6_0;
+
+       case XMM7_REG:
+         return CV_AMD64_XMM7_0;
+
+       case XMM8_REG:
+         return CV_AMD64_XMM8_0;
+
+       case XMM9_REG:
+         return CV_AMD64_XMM9_0;
+
+       case XMM10_REG:
+         return CV_AMD64_XMM10_0;
+
+       case XMM11_REG:
+         return CV_AMD64_XMM11_0;
+
+       case XMM12_REG:
+         return CV_AMD64_XMM12_0;
+
+       case XMM13_REG:
+         return CV_AMD64_XMM13_0;
+
+       case XMM14_REG:
+         return CV_AMD64_XMM14_0;
+
+       case XMM15_REG:
+         return CV_AMD64_XMM15_0;
+       }
+
+      break;
+
+    case E_DImode:
+    case E_DDmode:
+    case E_DFmode:
+      switch (regno)
+       {
+       case AX_REG:
+         return CV_AMD64_RAX;
+
+       case DX_REG:
+         return CV_AMD64_RDX;
+
+       case CX_REG:
+         return CV_AMD64_RCX;
+
+       case BX_REG:
+         return CV_AMD64_RBX;
+
+       case SI_REG:
+         return CV_AMD64_RSI;
+
+       case DI_REG:
+         return CV_AMD64_RDI;
+
+       case BP_REG:
+         return CV_AMD64_RBP;
+
+       case SP_REG:
+         return CV_AMD64_RSP;
+
+       case R8_REG:
+         return CV_AMD64_R8;
+
+       case R9_REG:
+         return CV_AMD64_R9;
+
+       case R10_REG:
+         return CV_AMD64_R10;
+
+       case R11_REG:
+         return CV_AMD64_R11;
+
+       case R12_REG:
+         return CV_AMD64_R12;
+
+       case R13_REG:
+         return CV_AMD64_R13;
+
+       case R14_REG:
+         return CV_AMD64_R14;
+
+       case R15_REG:
+         return CV_AMD64_R15;
+
+       case XMM0_REG:
+         return CV_AMD64_XMM0L;
+
+       case XMM1_REG:
+         return CV_AMD64_XMM1L;
+
+       case XMM2_REG:
+         return CV_AMD64_XMM2L;
+
+       case XMM3_REG:
+         return CV_AMD64_XMM3L;
+
+       case XMM4_REG:
+         return CV_AMD64_XMM4L;
+
+       case XMM5_REG:
+         return CV_AMD64_XMM5L;
+
+       case XMM6_REG:
+         return CV_AMD64_XMM6L;
+
+       case XMM7_REG:
+         return CV_AMD64_XMM7L;
+
+       case XMM8_REG:
+         return CV_AMD64_XMM8L;
+
+       case XMM9_REG:
+         return CV_AMD64_XMM9L;
+
+       case XMM10_REG:
+         return CV_AMD64_XMM10L;
+
+       case XMM11_REG:
+         return CV_AMD64_XMM11L;
+
+       case XMM12_REG:
+         return CV_AMD64_XMM12L;
+
+       case XMM13_REG:
+         return CV_AMD64_XMM13L;
+
+       case XMM14_REG:
+         return CV_AMD64_XMM14L;
+
+       case XMM15_REG:
+         return CV_AMD64_XMM15L;
+       }
+
+      break;
+
+    case E_TImode:
+      switch (regno)
+       {
+       case AX_REG:
+         return CV_AMD64_RAX;
+
+       case DX_REG:
+         return CV_AMD64_RDX;
+
+       case CX_REG:
+         return CV_AMD64_RCX;
+
+       case BX_REG:
+         return CV_AMD64_RBX;
+
+       case SI_REG:
+         return CV_AMD64_RSI;
+
+       case DI_REG:
+         return CV_AMD64_RDI;
+
+       case BP_REG:
+         return CV_AMD64_RBP;
+
+       case SP_REG:
+         return CV_AMD64_RSP;
+
+       case R8_REG:
+         return CV_AMD64_R8;
+
+       case R9_REG:
+         return CV_AMD64_R9;
+
+       case R10_REG:
+         return CV_AMD64_R10;
+
+       case R11_REG:
+         return CV_AMD64_R11;
+
+       case R12_REG:
+         return CV_AMD64_R12;
+
+       case R13_REG:
+         return CV_AMD64_R13;
+
+       case R14_REG:
+         return CV_AMD64_R14;
+
+       case R15_REG:
+         return CV_AMD64_R15;
+
+       case XMM0_REG:
+         return CV_AMD64_XMM0;
+
+       case XMM1_REG:
+         return CV_AMD64_XMM1;
+
+       case XMM2_REG:
+         return CV_AMD64_XMM2;
+
+       case XMM3_REG:
+         return CV_AMD64_XMM3;
+
+       case XMM4_REG:
+         return CV_AMD64_XMM4;
+
+       case XMM5_REG:
+         return CV_AMD64_XMM5;
+
+       case XMM6_REG:
+         return CV_AMD64_XMM6;
+
+       case XMM7_REG:
+         return CV_AMD64_XMM7;
+
+       case XMM8_REG:
+         return CV_AMD64_XMM8;
+
+       case XMM9_REG:
+         return CV_AMD64_XMM9;
+
+       case XMM10_REG:
+         return CV_AMD64_XMM10;
+
+       case XMM11_REG:
+         return CV_AMD64_XMM11;
+
+       case XMM12_REG:
+         return CV_AMD64_XMM12;
+
+       case XMM13_REG:
+         return CV_AMD64_XMM13;
+
+       case XMM14_REG:
+         return CV_AMD64_XMM14;
+
+       case XMM15_REG:
+         return CV_AMD64_XMM15;
+       }
+
+      break;
+
+    case E_HImode:
+      switch (regno)
+       {
+       case AX_REG:
+         return CV_AMD64_AX;
+
+       case DX_REG:
+         return CV_AMD64_DX;
+
+       case CX_REG:
+         return CV_AMD64_CX;
+
+       case BX_REG:
+         return CV_AMD64_BX;
+
+       case SI_REG:
+         return CV_AMD64_SI;
+
+       case DI_REG:
+         return CV_AMD64_DI;
+
+       case BP_REG:
+         return CV_AMD64_BP;
+
+       case SP_REG:
+         return CV_AMD64_SP;
+
+       case FLAGS_REG:
+         return CV_AMD64_FLAGS;
+
+       case R8_REG:
+         return CV_AMD64_R8W;
+
+       case R9_REG:
+         return CV_AMD64_R9W;
+
+       case R10_REG:
+         return CV_AMD64_R10W;
+
+       case R11_REG:
+         return CV_AMD64_R11W;
+
+       case R12_REG:
+         return CV_AMD64_R12W;
+
+       case R13_REG:
+         return CV_AMD64_R13W;
+
+       case R14_REG:
+         return CV_AMD64_R14W;
+
+       case R15_REG:
+         return CV_AMD64_R15W;
+       }
+
+      break;
+
+    case E_QImode:
+      switch (regno)
+       {
+       case AX_REG:
+         return CV_AMD64_AL;
+
+       case DX_REG:
+         return CV_AMD64_DL;
+
+       case CX_REG:
+         return CV_AMD64_CL;
+
+       case BX_REG:
+         return CV_AMD64_BL;
+
+       case SI_REG:
+         return CV_AMD64_SIL;
+
+       case DI_REG:
+         return CV_AMD64_DIL;
+
+       case BP_REG:
+         return CV_AMD64_BPL;
+
+       case SP_REG:
+         return CV_AMD64_SPL;
+
+       case R8_REG:
+         return CV_AMD64_R8B;
+
+       case R9_REG:
+         return CV_AMD64_R9B;
+
+       case R10_REG:
+         return CV_AMD64_R10B;
+
+       case R11_REG:
+         return CV_AMD64_R11B;
+
+       case R12_REG:
+         return CV_AMD64_R12B;
+
+       case R13_REG:
+         return CV_AMD64_R13B;
+
+       case R14_REG:
+         return CV_AMD64_R14B;
+
+       case R15_REG:
+         return CV_AMD64_R15B;
+       }
+
+      break;
+
+    case E_TFmode:
+      switch (regno)
+       {
+       case XMM0_REG:
+         return CV_AMD64_XMM0;
+
+       case XMM1_REG:
+         return CV_AMD64_XMM1;
+
+       case XMM2_REG:
+         return CV_AMD64_XMM2;
+
+       case XMM3_REG:
+         return CV_AMD64_XMM3;
+
+       case XMM4_REG:
+         return CV_AMD64_XMM4;
+
+       case XMM5_REG:
+         return CV_AMD64_XMM5;
+
+       case XMM6_REG:
+         return CV_AMD64_XMM6;
+
+       case XMM7_REG:
+         return CV_AMD64_XMM7;
+
+       case XMM8_REG:
+         return CV_AMD64_XMM8;
+
+       case XMM9_REG:
+         return CV_AMD64_XMM9;
+
+       case XMM10_REG:
+         return CV_AMD64_XMM10;
+
+       case XMM11_REG:
+         return CV_AMD64_XMM11;
+
+       case XMM12_REG:
+         return CV_AMD64_XMM12;
+
+       case XMM13_REG:
+         return CV_AMD64_XMM13;
+
+       case XMM14_REG:
+         return CV_AMD64_XMM14;
+
+       case XMM15_REG:
+         return CV_AMD64_XMM15;
+       }
+
+      break;
+
+    case E_XFmode:
+      switch (regno)
+       {
+       case ST0_REG:
+         return CV_AMD64_ST0;
+
+       case ST1_REG:
+         return CV_AMD64_ST1;
+
+       case ST2_REG:
+         return CV_AMD64_ST2;
+
+       case ST3_REG:
+         return CV_AMD64_ST3;
+
+       case ST4_REG:
+         return CV_AMD64_ST4;
+
+       case ST5_REG:
+         return CV_AMD64_ST5;
+
+       case ST6_REG:
+         return CV_AMD64_ST6;
+
+       case ST7_REG:
+         return CV_AMD64_ST7;
+
+       case AX_REG:
+         return CV_AMD64_RAX;
+
+       case DX_REG:
+         return CV_AMD64_RDX;
+
+       case CX_REG:
+         return CV_AMD64_RCX;
+
+       case BX_REG:
+         return CV_AMD64_RBX;
+
+       case SI_REG:
+         return CV_AMD64_RSI;
+
+       case DI_REG:
+         return CV_AMD64_RDI;
+
+       case BP_REG:
+         return CV_AMD64_RBP;
+
+       case SP_REG:
+         return CV_AMD64_RSP;
+
+       case R8_REG:
+         return CV_AMD64_R8;
+
+       case R9_REG:
+         return CV_AMD64_R9;
+
+       case R10_REG:
+         return CV_AMD64_R10;
+
+       case R11_REG:
+         return CV_AMD64_R11;
+
+       case R12_REG:
+         return CV_AMD64_R12;
+
+       case R13_REG:
+         return CV_AMD64_R13;
+
+       case R14_REG:
+         return CV_AMD64_R14;
+
+       case R15_REG:
+         return CV_AMD64_R15;
+       }
+
+      break;
+
+    default:
+      break;
+    }
+
+  warning (0, "could not map amd64 register %u, mode %u to CodeView constant",
+          regno, mode);
+
+  return CV_AMD64_NONE;
+}
+
+/* Map a gcc register constant to its CodeView equivalent. */
+static unsigned int
+map_register_no (unsigned int regno, machine_mode mode)
+{
+  if (regno >= FIRST_PSEUDO_REGISTER)
+    return 0;
+
+  if (TARGET_64BIT)
+    return (unsigned int) map_register_no_amd64 (regno, mode);
+  else
+    return (unsigned int) map_register_no_x86 (regno, mode);
+}
+
+/* We can't rely on eliminate_regs for stack offsets - it seems that some
+ * compiler passes alter the stack without changing the values in the
+ * reg_eliminate array that eliminate_regs relies on. */
+static int32_t
+fix_variable_offset (rtx orig_rtl, unsigned int reg, int32_t offset)
+{
+  if (!TARGET_64BIT)
+    {
+      if (reg == CV_X86_EBP)
+       {
+         if (GET_CODE (XEXP (orig_rtl, 0)) == PLUS &&
+             GET_CODE (XEXP (XEXP (orig_rtl, 0), 0)) == REG &&
+             GET_CODE (XEXP (XEXP (orig_rtl, 0), 1)) == CONST_INT &&
+             REGNO (XEXP (XEXP (orig_rtl, 0), 0)) == ARGP_REG)
+           {
+             return cfun->machine->frame.hard_frame_pointer_offset +
+               XINT (XEXP (XEXP (orig_rtl, 0), 1), 0);
+           }
+         else if (REG_P (XEXP (orig_rtl, 0))
+                  && REGNO (XEXP (orig_rtl, 0)) == ARGP_REG)
+           return cfun->machine->frame.hard_frame_pointer_offset;
+         else if (GET_CODE (XEXP (orig_rtl, 0)) == PLUS &&
+                  GET_CODE (XEXP (XEXP (orig_rtl, 0), 0)) == REG &&
+                  GET_CODE (XEXP (XEXP (orig_rtl, 0), 1)) == CONST_INT &&
+                  REGNO (XEXP (XEXP (orig_rtl, 0), 0)) == FRAME_REG)
+           {
+             return cfun->machine->frame.hard_frame_pointer_offset -
+               cfun->machine->frame.frame_pointer_offset +
+               XINT (XEXP (XEXP (orig_rtl, 0), 1), 0);
+           }
+         else if (REG_P (XEXP (orig_rtl, 0))
+                  && REGNO (XEXP (orig_rtl, 0)) == FRAME_REG)
+           {
+             return cfun->machine->frame.hard_frame_pointer_offset -
+               cfun->machine->frame.frame_pointer_offset;
+           }
+       }
+      else if (reg == CV_X86_ESP)
+       {
+         if (GET_CODE (XEXP (orig_rtl, 0)) == PLUS &&
+             GET_CODE (XEXP (XEXP (orig_rtl, 0), 0)) == REG &&
+             GET_CODE (XEXP (XEXP (orig_rtl, 0), 1)) == CONST_INT &&
+             REGNO (XEXP (XEXP (orig_rtl, 0), 0)) == ARGP_REG)
+           {
+             return cfun->machine->frame.stack_pointer_offset +
+               XINT (XEXP (XEXP (orig_rtl, 0), 1), 0);
+           }
+         else if (REG_P (XEXP (orig_rtl, 0))
+                  && REGNO (XEXP (orig_rtl, 0)) == ARGP_REG)
+           return cfun->machine->frame.stack_pointer_offset;
+         else if (GET_CODE (XEXP (orig_rtl, 0)) == PLUS &&
+                  GET_CODE (XEXP (XEXP (orig_rtl, 0), 0)) == REG &&
+                  GET_CODE (XEXP (XEXP (orig_rtl, 0), 1)) == CONST_INT &&
+                  REGNO (XEXP (XEXP (orig_rtl, 0), 0)) == FRAME_REG)
+           {
+             return cfun->machine->frame.stack_pointer_offset -
+               cfun->machine->frame.frame_pointer_offset +
+               XINT (XEXP (XEXP (orig_rtl, 0), 1), 0);
+           }
+         else if (REG_P (XEXP (orig_rtl, 0))
+                  && REGNO (XEXP (orig_rtl, 0)) == FRAME_REG)
+           {
+             return cfun->machine->frame.stack_pointer_offset -
+               cfun->machine->frame.frame_pointer_offset;
+           }
+       }
+    }
+  else
+    {
+      if (reg == CV_AMD64_RBP)
+       {
+         if (GET_CODE (XEXP (orig_rtl, 0)) == PLUS &&
+             GET_CODE (XEXP (XEXP (orig_rtl, 0), 0)) == REG &&
+             GET_CODE (XEXP (XEXP (orig_rtl, 0), 1)) == CONST_INT &&
+             REGNO (XEXP (XEXP (orig_rtl, 0), 0)) == ARGP_REG)
+           {
+             return cfun->machine->frame.hard_frame_pointer_offset +
+               XINT (XEXP (XEXP (orig_rtl, 0), 1), 0);
+           }
+         else if (REG_P (XEXP (orig_rtl, 0))
+                  && REGNO (XEXP (orig_rtl, 0)) == ARGP_REG)
+           return cfun->machine->frame.hard_frame_pointer_offset;
+         else if (GET_CODE (XEXP (orig_rtl, 0)) == PLUS &&
+                  GET_CODE (XEXP (XEXP (orig_rtl, 0), 0)) == REG &&
+                  GET_CODE (XEXP (XEXP (orig_rtl, 0), 1)) == CONST_INT &&
+                  REGNO (XEXP (XEXP (orig_rtl, 0), 0)) == FRAME_REG)
+           {
+             return cfun->machine->frame.hard_frame_pointer_offset -
+               cfun->machine->frame.frame_pointer_offset +
+               XINT (XEXP (XEXP (orig_rtl, 0), 1), 0);
+           }
+         else if (REG_P (XEXP (orig_rtl, 0))
+                  && REGNO (XEXP (orig_rtl, 0)) == FRAME_REG)
+           {
+             return cfun->machine->frame.hard_frame_pointer_offset -
+               cfun->machine->frame.frame_pointer_offset;
+           }
+       }
+      else if (reg == CV_AMD64_RSP)
+       {
+         if (GET_CODE (XEXP (orig_rtl, 0)) == PLUS &&
+             GET_CODE (XEXP (XEXP (orig_rtl, 0), 0)) == REG &&
+             GET_CODE (XEXP (XEXP (orig_rtl, 0), 1)) == CONST_INT &&
+             REGNO (XEXP (XEXP (orig_rtl, 0), 0)) == ARGP_REG)
+           {
+             return cfun->machine->frame.stack_pointer_offset +
+               XINT (XEXP (XEXP (orig_rtl, 0), 1), 0);
+           }
+         else if (REG_P (XEXP (orig_rtl, 0))
+                  && REGNO (XEXP (orig_rtl, 0)) == ARGP_REG)
+           return cfun->machine->frame.stack_pointer_offset;
+         else if (GET_CODE (XEXP (orig_rtl, 0)) == PLUS &&
+                  GET_CODE (XEXP (XEXP (orig_rtl, 0), 0)) == REG &&
+                  GET_CODE (XEXP (XEXP (orig_rtl, 0), 1)) == CONST_INT &&
+                  REGNO (XEXP (XEXP (orig_rtl, 0), 0)) == FRAME_REG)
+           {
+             return cfun->machine->frame.stack_pointer_offset -
+               cfun->machine->frame.frame_pointer_offset +
+               XINT (XEXP (XEXP (orig_rtl, 0), 1), 0);
+           }
+         else if (REG_P (XEXP (orig_rtl, 0))
+                  && REGNO (XEXP (orig_rtl, 0)) == FRAME_REG)
+           {
+             return cfun->machine->frame.stack_pointer_offset -
+               cfun->machine->frame.frame_pointer_offset;
+           }
+       }
+    }
+
+  return offset;
+}
+
+/* We've been given a declaration for a local variable. Allocate a
+ * pdb_local_var and add it to the list for this scope block. */
+static void
+add_local (const char *name, tree t, struct pdb_type *type, rtx orig_rtl,
+          unsigned int block_num)
+{
+  struct pdb_local_var *plv;
+  size_t name_len = strlen (name);
+  rtx rtl;
+
+  plv =
+    (struct pdb_local_var *) xmalloc (offsetof (struct pdb_local_var, name) +
+                                     name_len + 1);
+  plv->next = NULL;
+  plv->type = type;
+  plv->symbol = NULL;
+  plv->t = t;
+  plv->block_num = block_num;
+  plv->var_type = pdb_local_var_unknown;
+  memcpy (plv->name, name, name_len + 1);
+
+  rtl = eliminate_regs (orig_rtl, VOIDmode, NULL_RTX);
+
+  if (MEM_P (rtl))
+    {
+      if (GET_CODE (XEXP (rtl, 0)) == PLUS
+         && GET_CODE (XEXP (XEXP (rtl, 0), 0)) == REG
+         && GET_CODE (XEXP (XEXP (rtl, 0), 1)) == CONST_INT)
+       {
+         plv->var_type = pdb_local_var_regrel;
+         plv->reg =
+           map_register_no (REGNO (XEXP (XEXP (rtl, 0), 0)),
+                            GET_MODE (XEXP (XEXP (rtl, 0), 0)));
+         plv->offset = XINT (XEXP (XEXP (rtl, 0), 1), 0);
+       }
+      else if (REG_P (XEXP (rtl, 0)))
+       {
+         plv->var_type = pdb_local_var_regrel;
+         plv->reg =
+           map_register_no (REGNO (XEXP (rtl, 0)), GET_MODE (XEXP (rtl, 0)));
+         plv->offset = 0;
+       }
+      else if (SYMBOL_REF_P (XEXP (rtl, 0)))
+       {
+         plv->var_type = pdb_local_var_symbol;
+         plv->symbol = xstrdup (XSTR (XEXP (rtl, 0), 0));
+       }
+    }
+  else if (REG_P (rtl))
+    {
+      plv->var_type = pdb_local_var_register;
+      plv->reg = map_register_no (REGNO (rtl), GET_MODE (rtl));
+    }
+
+  if (plv->var_type == pdb_local_var_regrel)
+    plv->offset = fix_variable_offset (orig_rtl, plv->reg, plv->offset);
+
+  if (cur_func->last_local_var)
+    cur_func->last_local_var->next = plv;
+
+  cur_func->last_local_var = plv;
+
+  if (!cur_func->local_vars)
+    cur_func->local_vars = plv;
+}
+
+/* We've encountered a scope block within a function - loop through and
+ * add any variable declarations, then call recursively for any
+ * sub-blocks. */
+static void
+pdbout_function_decl_block (tree block)
+{
+  tree f;
+
+  f = BLOCK_VARS (block);
+  while (f)
+    {
+      if (TREE_CODE (f) == VAR_DECL && DECL_RTL_SET_P (f) && DECL_NAME (f))
+       {
+         struct pdb_type *type = find_type (TREE_TYPE (f));
+
+         add_local (IDENTIFIER_POINTER (DECL_NAME (f)), f,
+                    type, DECL_RTL (f), BLOCK_NUMBER (block));
+       }
+
+      f = TREE_CHAIN (f);
+    }
+
+  f = BLOCK_SUBBLOCKS (block);
+  while (f)
+    {
+      pdbout_function_decl_block (f);
+
+      f = BLOCK_CHAIN (f);
+    }
+}
+
+/* We've encountered a function declaration. Add the parameters as local
+ * variables, then loop through and add its scope blocks. */
+static void
+pdbout_function_decl (tree decl)
+{
+  tree f;
+
+  if (!cur_func)
+    return;
+
+  f = DECL_ARGUMENTS (decl);
+  while (f)
+    {
+      if (TREE_CODE (f) == PARM_DECL && DECL_NAME (f))
+       {
+         struct pdb_type *type = find_type (TREE_TYPE (f));
+
+         add_local (IDENTIFIER_POINTER (DECL_NAME (f)), f,
+                    type, DECL_RTL (f), 0);
+       }
+
+      f = TREE_CHAIN (f);
+    }
+
+  pdbout_function_decl_block (DECL_INITIAL (decl));
+
+  cur_func = NULL;
+  cur_block = NULL;
+}
+
+/* We've encountered the start of a scope block - output an asm label so
+ * it can be referred to elsewhere. */
+static void
+pdbout_begin_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
+{
+  struct pdb_block *b;
+
+  fprintf (asm_out_file, ".Lblockstart%u:\n", blocknum);
+
+  b = (struct pdb_block *) xmalloc (sizeof (pdb_block));
+
+  if (cur_block->last_child)
+    cur_block->last_child->next = b;
+
+  cur_block->last_child = b;
+
+  if (!cur_block->children)
+    cur_block->children = b;
+
+  b->parent = cur_block;
+  b->num = blocknum;
+  b->children = b->last_child = NULL;
+  b->next = NULL;
+
+  cur_block = b;
+}
+
+/* We've encountered the end of a scope block - output an asm label so
+ * it can be referred to elsewhere. */
+static void
+pdbout_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
+{
+  fprintf (asm_out_file, ".Lblockend%u:\n", blocknum);
+
+  cur_block = cur_block->parent;
+}
diff --git a/gcc/pdbout.h b/gcc/pdbout.h
index 85a1eb548cb..782e8d0faa1 100644
--- a/gcc/pdbout.h
+++ b/gcc/pdbout.h
@@ -21,16 +21,49 @@
 #define GCC_PDBOUT_H 1
 
 #define S_END                          0x0006
+#define S_BLOCK32                      0x1103
+#define S_REGISTER                     0x1106
+#define S_BPREL32                      0x110b
 #define S_LDATA32                      0x110c
 #define S_GDATA32                      0x110d
 #define S_LPROC32                      0x110f
 #define S_GPROC32                      0x1110
+#define S_REGREL32                     0x1111
 
 /* Format version as of MSVC 7 */
 #define CV_SIGNATURE_C13       4
 
 #define DEBUG_S_SYMBOLS                        0xf1
 
/* How a pdb_local_var's runtime location is described; selects which of
 * the offset / reg / symbol fields in struct pdb_local_var is meaningful. */
enum pdb_local_var_type
{
  pdb_local_var_unknown,	/* location could not be determined */
  pdb_local_var_regrel,		/* at an offset from a register */
  pdb_local_var_register,	/* held in a register */
  pdb_local_var_symbol		/* referenced via an asm symbol name */
};
+
/* A local variable or parameter of a function being output.  Which of
 * offset / reg / symbol is meaningful depends on var_type. */
struct pdb_local_var
{
  struct pdb_local_var *next;		/* next in the function's list */
  enum pdb_local_var_type var_type;	/* how the location is described */
  tree t;				/* the GCC decl this refers to */
  int32_t offset;			/* register-relative offset (regrel) */
  unsigned int reg;			/* register number - presumably a
					   CodeView CV_X86_*/CV_AMD64_*
					   value; confirm against add_local */
  unsigned int block_num;		/* scope block number; 0 means the
					   function's outermost scope */
  struct pdb_type *type;		/* the variable's PDB type */
  char *symbol;				/* asm symbol name (symbol vars) */
  char name[1];				/* variable name - struct is
					   over-allocated to hold it */
};
+
/* A lexical scope block within a function.  Blocks form a tree rooted at
 * pdb_func's embedded block; pdbout_begin_block/pdbout_end_block maintain
 * it as scopes open and close. */
struct pdb_block
{
  struct pdb_block *next;		/* next sibling under the same parent */
  struct pdb_block *parent;		/* enclosing block (NULL at the root) */
  unsigned int num;			/* block number, matching the
					   .Lblockstart%u / .Lblockend%u
					   asm labels */
  struct pdb_block *children, *last_child;	/* child list head and tail */
};
+
 struct pdb_func
 {
   struct pdb_func *next;
@@ -38,6 +71,8 @@ struct pdb_func
   int num;
   unsigned int public_flag;
   struct pdb_type *type;
+  struct pdb_local_var *local_vars, *last_local_var;
+  struct pdb_block block;
 };
 
 struct pdb_global_var
@@ -121,4 +156,938 @@ struct pdb_type_tree_hasher : nofree_ptr_hash <struct 
pdb_type>
   static inline bool equal (const value_type, compare_type);
 };
 
/* CodeView register numbers for 32-bit x86, as used in S_REGISTER /
 * S_REGREL32 records.  Values follow Microsoft's CV_HREG_e enumeration.
 * (Trailing comma after the final enumerator removed for consistency with
 * enum pdb_amd64_register and for strict-C89/pedantic compatibility.) */
enum pdb_x86_register
{
  CV_X86_NONE = 0,
  CV_X86_AL = 1,
  CV_X86_CL = 2,
  CV_X86_DL = 3,
  CV_X86_BL = 4,
  CV_X86_AH = 5,
  CV_X86_CH = 6,
  CV_X86_DH = 7,
  CV_X86_BH = 8,
  CV_X86_AX = 9,
  CV_X86_CX = 10,
  CV_X86_DX = 11,
  CV_X86_BX = 12,
  CV_X86_SP = 13,
  CV_X86_BP = 14,
  CV_X86_SI = 15,
  CV_X86_DI = 16,
  CV_X86_EAX = 17,
  CV_X86_ECX = 18,
  CV_X86_EDX = 19,
  CV_X86_EBX = 20,
  CV_X86_ESP = 21,
  CV_X86_EBP = 22,
  CV_X86_ESI = 23,
  CV_X86_EDI = 24,
  CV_X86_ES = 25,
  CV_X86_CS = 26,
  CV_X86_SS = 27,
  CV_X86_DS = 28,
  CV_X86_FS = 29,
  CV_X86_GS = 30,
  CV_X86_IP = 31,
  CV_X86_FLAGS = 32,
  CV_X86_EIP = 33,
  CV_X86_EFLAGS = 34,
  CV_X86_CR0 = 80,
  CV_X86_CR1 = 81,
  CV_X86_CR2 = 82,
  CV_X86_CR3 = 83,
  CV_X86_CR4 = 84,
  CV_X86_DR0 = 90,
  CV_X86_DR1 = 91,
  CV_X86_DR2 = 92,
  CV_X86_DR3 = 93,
  CV_X86_DR4 = 94,
  CV_X86_DR5 = 95,
  CV_X86_DR6 = 96,
  CV_X86_DR7 = 97,
  CV_X86_GDTR = 110,
  CV_X86_GDTL = 111,
  CV_X86_IDTR = 112,
  CV_X86_IDTL = 113,
  CV_X86_LDTR = 114,
  CV_X86_TR = 115,
  CV_X86_ST0 = 128,
  CV_X86_ST1 = 129,
  CV_X86_ST2 = 130,
  CV_X86_ST3 = 131,
  CV_X86_ST4 = 132,
  CV_X86_ST5 = 133,
  CV_X86_ST6 = 134,
  CV_X86_ST7 = 135,
  CV_X86_CTRL = 136,
  CV_X86_STAT = 137,
  CV_X86_TAG = 138,
  CV_X86_FPIP = 139,
  CV_X86_FPCS = 140,
  CV_X86_FPDO = 141,
  CV_X86_FPDS = 142,
  CV_X86_ISEM = 143,
  CV_X86_FPEIP = 144,
  CV_X86_FPEDO = 145,
  CV_X86_MM0 = 146,
  CV_X86_MM1 = 147,
  CV_X86_MM2 = 148,
  CV_X86_MM3 = 149,
  CV_X86_MM4 = 150,
  CV_X86_MM5 = 151,
  CV_X86_MM6 = 152,
  CV_X86_MM7 = 153,
  CV_X86_XMM0 = 154,
  CV_X86_XMM1 = 155,
  CV_X86_XMM2 = 156,
  CV_X86_XMM3 = 157,
  CV_X86_XMM4 = 158,
  CV_X86_XMM5 = 159,
  CV_X86_XMM6 = 160,
  CV_X86_XMM7 = 161,
  CV_X86_XMM00 = 162,
  CV_X86_XMM01 = 163,
  CV_X86_XMM02 = 164,
  CV_X86_XMM03 = 165,
  CV_X86_XMM10 = 166,
  CV_X86_XMM11 = 167,
  CV_X86_XMM12 = 168,
  CV_X86_XMM13 = 169,
  CV_X86_XMM20 = 170,
  CV_X86_XMM21 = 171,
  CV_X86_XMM22 = 172,
  CV_X86_XMM23 = 173,
  CV_X86_XMM30 = 174,
  CV_X86_XMM31 = 175,
  CV_X86_XMM32 = 176,
  CV_X86_XMM33 = 177,
  CV_X86_XMM40 = 178,
  CV_X86_XMM41 = 179,
  CV_X86_XMM42 = 180,
  CV_X86_XMM43 = 181,
  CV_X86_XMM50 = 182,
  CV_X86_XMM51 = 183,
  CV_X86_XMM52 = 184,
  CV_X86_XMM53 = 185,
  CV_X86_XMM60 = 186,
  CV_X86_XMM61 = 187,
  CV_X86_XMM62 = 188,
  CV_X86_XMM63 = 189,
  CV_X86_XMM70 = 190,
  CV_X86_XMM71 = 191,
  CV_X86_XMM72 = 192,
  CV_X86_XMM73 = 193,
  CV_X86_XMM0L = 194,
  CV_X86_XMM1L = 195,
  CV_X86_XMM2L = 196,
  CV_X86_XMM3L = 197,
  CV_X86_XMM4L = 198,
  CV_X86_XMM5L = 199,
  CV_X86_XMM6L = 200,
  CV_X86_XMM7L = 201,
  CV_X86_XMM0H = 202,
  CV_X86_XMM1H = 203,
  CV_X86_XMM2H = 204,
  CV_X86_XMM3H = 205,
  CV_X86_XMM4H = 206,
  CV_X86_XMM5H = 207,
  CV_X86_XMM6H = 208,
  CV_X86_XMM7H = 209,
  CV_X86_MXCSR = 211,
  CV_X86_EMM0L = 220,
  CV_X86_EMM1L = 221,
  CV_X86_EMM2L = 222,
  CV_X86_EMM3L = 223,
  CV_X86_EMM4L = 224,
  CV_X86_EMM5L = 225,
  CV_X86_EMM6L = 226,
  CV_X86_EMM7L = 227,
  CV_X86_EMM0H = 228,
  CV_X86_EMM1H = 229,
  CV_X86_EMM2H = 230,
  CV_X86_EMM3H = 231,
  CV_X86_EMM4H = 232,
  CV_X86_EMM5H = 233,
  CV_X86_EMM6H = 234,
  CV_X86_EMM7H = 235,
  CV_X86_MM00 = 236,
  CV_X86_MM01 = 237,
  CV_X86_MM10 = 238,
  CV_X86_MM11 = 239,
  CV_X86_MM20 = 240,
  CV_X86_MM21 = 241,
  CV_X86_MM30 = 242,
  CV_X86_MM31 = 243,
  CV_X86_MM40 = 244,
  CV_X86_MM41 = 245,
  CV_X86_MM50 = 246,
  CV_X86_MM51 = 247,
  CV_X86_MM60 = 248,
  CV_X86_MM61 = 249,
  CV_X86_MM70 = 250,
  CV_X86_MM71 = 251,
  CV_X86_YMM0 = 252,
  CV_X86_YMM1 = 253,
  CV_X86_YMM2 = 254,
  CV_X86_YMM3 = 255,
  CV_X86_YMM4 = 256,
  CV_X86_YMM5 = 257,
  CV_X86_YMM6 = 258,
  CV_X86_YMM7 = 259,
  CV_X86_YMM0H = 260,
  CV_X86_YMM1H = 261,
  CV_X86_YMM2H = 262,
  CV_X86_YMM3H = 263,
  CV_X86_YMM4H = 264,
  CV_X86_YMM5H = 265,
  CV_X86_YMM6H = 266,
  CV_X86_YMM7H = 267,
  CV_X86_YMM0I0 = 268,
  CV_X86_YMM0I1 = 269,
  CV_X86_YMM0I2 = 270,
  CV_X86_YMM0I3 = 271,
  CV_X86_YMM1I0 = 272,
  CV_X86_YMM1I1 = 273,
  CV_X86_YMM1I2 = 274,
  CV_X86_YMM1I3 = 275,
  CV_X86_YMM2I0 = 276,
  CV_X86_YMM2I1 = 277,
  CV_X86_YMM2I2 = 278,
  CV_X86_YMM2I3 = 279,
  CV_X86_YMM3I0 = 280,
  CV_X86_YMM3I1 = 281,
  CV_X86_YMM3I2 = 282,
  CV_X86_YMM3I3 = 283,
  CV_X86_YMM4I0 = 284,
  CV_X86_YMM4I1 = 285,
  CV_X86_YMM4I2 = 286,
  CV_X86_YMM4I3 = 287,
  CV_X86_YMM5I0 = 288,
  CV_X86_YMM5I1 = 289,
  CV_X86_YMM5I2 = 290,
  CV_X86_YMM5I3 = 291,
  CV_X86_YMM6I0 = 292,
  CV_X86_YMM6I1 = 293,
  CV_X86_YMM6I2 = 294,
  CV_X86_YMM6I3 = 295,
  CV_X86_YMM7I0 = 296,
  CV_X86_YMM7I1 = 297,
  CV_X86_YMM7I2 = 298,
  CV_X86_YMM7I3 = 299,
  CV_X86_YMM0F0 = 300,
  CV_X86_YMM0F1 = 301,
  CV_X86_YMM0F2 = 302,
  CV_X86_YMM0F3 = 303,
  CV_X86_YMM0F4 = 304,
  CV_X86_YMM0F5 = 305,
  CV_X86_YMM0F6 = 306,
  CV_X86_YMM0F7 = 307,
  CV_X86_YMM1F0 = 308,
  CV_X86_YMM1F1 = 309,
  CV_X86_YMM1F2 = 310,
  CV_X86_YMM1F3 = 311,
  CV_X86_YMM1F4 = 312,
  CV_X86_YMM1F5 = 313,
  CV_X86_YMM1F6 = 314,
  CV_X86_YMM1F7 = 315,
  CV_X86_YMM2F0 = 316,
  CV_X86_YMM2F1 = 317,
  CV_X86_YMM2F2 = 318,
  CV_X86_YMM2F3 = 319,
  CV_X86_YMM2F4 = 320,
  CV_X86_YMM2F5 = 321,
  CV_X86_YMM2F6 = 322,
  CV_X86_YMM2F7 = 323,
  CV_X86_YMM3F0 = 324,
  CV_X86_YMM3F1 = 325,
  CV_X86_YMM3F2 = 326,
  CV_X86_YMM3F3 = 327,
  CV_X86_YMM3F4 = 328,
  CV_X86_YMM3F5 = 329,
  CV_X86_YMM3F6 = 330,
  CV_X86_YMM3F7 = 331,
  CV_X86_YMM4F0 = 332,
  CV_X86_YMM4F1 = 333,
  CV_X86_YMM4F2 = 334,
  CV_X86_YMM4F3 = 335,
  CV_X86_YMM4F4 = 336,
  CV_X86_YMM4F5 = 337,
  CV_X86_YMM4F6 = 338,
  CV_X86_YMM4F7 = 339,
  CV_X86_YMM5F0 = 340,
  CV_X86_YMM5F1 = 341,
  CV_X86_YMM5F2 = 342,
  CV_X86_YMM5F3 = 343,
  CV_X86_YMM5F4 = 344,
  CV_X86_YMM5F5 = 345,
  CV_X86_YMM5F6 = 346,
  CV_X86_YMM5F7 = 347,
  CV_X86_YMM6F0 = 348,
  CV_X86_YMM6F1 = 349,
  CV_X86_YMM6F2 = 350,
  CV_X86_YMM6F3 = 351,
  CV_X86_YMM6F4 = 352,
  CV_X86_YMM6F5 = 353,
  CV_X86_YMM6F6 = 354,
  CV_X86_YMM6F7 = 355,
  CV_X86_YMM7F0 = 356,
  CV_X86_YMM7F1 = 357,
  CV_X86_YMM7F2 = 358,
  CV_X86_YMM7F3 = 359,
  CV_X86_YMM7F4 = 360,
  CV_X86_YMM7F5 = 361,
  CV_X86_YMM7F6 = 362,
  CV_X86_YMM7F7 = 363,
  CV_X86_YMM0D0 = 364,
  CV_X86_YMM0D1 = 365,
  CV_X86_YMM0D2 = 366,
  CV_X86_YMM0D3 = 367,
  CV_X86_YMM1D0 = 368,
  CV_X86_YMM1D1 = 369,
  CV_X86_YMM1D2 = 370,
  CV_X86_YMM1D3 = 371,
  CV_X86_YMM2D0 = 372,
  CV_X86_YMM2D1 = 373,
  CV_X86_YMM2D2 = 374,
  CV_X86_YMM2D3 = 375,
  CV_X86_YMM3D0 = 376,
  CV_X86_YMM3D1 = 377,
  CV_X86_YMM3D2 = 378,
  CV_X86_YMM3D3 = 379,
  CV_X86_YMM4D0 = 380,
  CV_X86_YMM4D1 = 381,
  CV_X86_YMM4D2 = 382,
  CV_X86_YMM4D3 = 383,
  CV_X86_YMM5D0 = 384,
  CV_X86_YMM5D1 = 385,
  CV_X86_YMM5D2 = 386,
  CV_X86_YMM5D3 = 387,
  CV_X86_YMM6D0 = 388,
  CV_X86_YMM6D1 = 389,
  CV_X86_YMM6D2 = 390,
  CV_X86_YMM6D3 = 391,
  CV_X86_YMM7D0 = 392,
  CV_X86_YMM7D1 = 393,
  CV_X86_YMM7D2 = 394,
  CV_X86_YMM7D3 = 395
};
+
/* CodeView register numbers for x86-64 (AMD64), as used in S_REGISTER /
 * S_REGREL32 records.  Values follow Microsoft's CV_HREG_e enumeration
 * for the AMD64 architecture. */
enum pdb_amd64_register
{
  CV_AMD64_NONE = 0,
  CV_AMD64_AL = 1,
  CV_AMD64_CL = 2,
  CV_AMD64_DL = 3,
  CV_AMD64_BL = 4,
  CV_AMD64_AH = 5,
  CV_AMD64_CH = 6,
  CV_AMD64_DH = 7,
  CV_AMD64_BH = 8,
  CV_AMD64_AX = 9,
  CV_AMD64_CX = 10,
  CV_AMD64_DX = 11,
  CV_AMD64_BX = 12,
  CV_AMD64_SP = 13,
  CV_AMD64_BP = 14,
  CV_AMD64_SI = 15,
  CV_AMD64_DI = 16,
  CV_AMD64_EAX = 17,
  CV_AMD64_ECX = 18,
  CV_AMD64_EDX = 19,
  CV_AMD64_EBX = 20,
  CV_AMD64_ESP = 21,
  CV_AMD64_EBP = 22,
  CV_AMD64_ESI = 23,
  CV_AMD64_EDI = 24,
  CV_AMD64_ES = 25,
  CV_AMD64_CS = 26,
  CV_AMD64_SS = 27,
  CV_AMD64_DS = 28,
  CV_AMD64_FS = 29,
  CV_AMD64_GS = 30,
  CV_AMD64_FLAGS = 32,
  CV_AMD64_RIP = 33,
  CV_AMD64_EFLAGS = 34,
  CV_AMD64_CR0 = 80,
  CV_AMD64_CR1 = 81,
  CV_AMD64_CR2 = 82,
  CV_AMD64_CR3 = 83,
  CV_AMD64_CR4 = 84,
  CV_AMD64_CR8 = 88,
  CV_AMD64_DR0 = 90,
  CV_AMD64_DR1 = 91,
  CV_AMD64_DR2 = 92,
  CV_AMD64_DR3 = 93,
  CV_AMD64_DR4 = 94,
  CV_AMD64_DR5 = 95,
  CV_AMD64_DR6 = 96,
  CV_AMD64_DR7 = 97,
  CV_AMD64_DR8 = 98,
  CV_AMD64_DR9 = 99,
  CV_AMD64_DR10 = 100,
  CV_AMD64_DR11 = 101,
  CV_AMD64_DR12 = 102,
  CV_AMD64_DR13 = 103,
  CV_AMD64_DR14 = 104,
  CV_AMD64_DR15 = 105,
  CV_AMD64_GDTR = 110,
  CV_AMD64_GDTL = 111,
  CV_AMD64_IDTR = 112,
  CV_AMD64_IDTL = 113,
  CV_AMD64_LDTR = 114,
  CV_AMD64_TR = 115,
  CV_AMD64_ST0 = 128,
  CV_AMD64_ST1 = 129,
  CV_AMD64_ST2 = 130,
  CV_AMD64_ST3 = 131,
  CV_AMD64_ST4 = 132,
  CV_AMD64_ST5 = 133,
  CV_AMD64_ST6 = 134,
  CV_AMD64_ST7 = 135,
  CV_AMD64_CTRL = 136,
  CV_AMD64_STAT = 137,
  CV_AMD64_TAG = 138,
  CV_AMD64_FPIP = 139,
  CV_AMD64_FPCS = 140,
  CV_AMD64_FPDO = 141,
  CV_AMD64_FPDS = 142,
  CV_AMD64_ISEM = 143,
  CV_AMD64_FPEIP = 144,
  CV_AMD64_FPEDO = 145,
  CV_AMD64_MM0 = 146,
  CV_AMD64_MM1 = 147,
  CV_AMD64_MM2 = 148,
  CV_AMD64_MM3 = 149,
  CV_AMD64_MM4 = 150,
  CV_AMD64_MM5 = 151,
  CV_AMD64_MM6 = 152,
  CV_AMD64_MM7 = 153,
  CV_AMD64_XMM0 = 154,
  CV_AMD64_XMM1 = 155,
  CV_AMD64_XMM2 = 156,
  CV_AMD64_XMM3 = 157,
  CV_AMD64_XMM4 = 158,
  CV_AMD64_XMM5 = 159,
  CV_AMD64_XMM6 = 160,
  CV_AMD64_XMM7 = 161,
  CV_AMD64_XMM0_0 = 162,
  CV_AMD64_XMM0_1 = 163,
  CV_AMD64_XMM0_2 = 164,
  CV_AMD64_XMM0_3 = 165,
  CV_AMD64_XMM1_0 = 166,
  CV_AMD64_XMM1_1 = 167,
  CV_AMD64_XMM1_2 = 168,
  CV_AMD64_XMM1_3 = 169,
  CV_AMD64_XMM2_0 = 170,
  CV_AMD64_XMM2_1 = 171,
  CV_AMD64_XMM2_2 = 172,
  CV_AMD64_XMM2_3 = 173,
  CV_AMD64_XMM3_0 = 174,
  CV_AMD64_XMM3_1 = 175,
  CV_AMD64_XMM3_2 = 176,
  CV_AMD64_XMM3_3 = 177,
  CV_AMD64_XMM4_0 = 178,
  CV_AMD64_XMM4_1 = 179,
  CV_AMD64_XMM4_2 = 180,
  CV_AMD64_XMM4_3 = 181,
  CV_AMD64_XMM5_0 = 182,
  CV_AMD64_XMM5_1 = 183,
  CV_AMD64_XMM5_2 = 184,
  CV_AMD64_XMM5_3 = 185,
  CV_AMD64_XMM6_0 = 186,
  CV_AMD64_XMM6_1 = 187,
  CV_AMD64_XMM6_2 = 188,
  CV_AMD64_XMM6_3 = 189,
  CV_AMD64_XMM7_0 = 190,
  CV_AMD64_XMM7_1 = 191,
  CV_AMD64_XMM7_2 = 192,
  CV_AMD64_XMM7_3 = 193,
  CV_AMD64_XMM0L = 194,
  CV_AMD64_XMM1L = 195,
  CV_AMD64_XMM2L = 196,
  CV_AMD64_XMM3L = 197,
  CV_AMD64_XMM4L = 198,
  CV_AMD64_XMM5L = 199,
  CV_AMD64_XMM6L = 200,
  CV_AMD64_XMM7L = 201,
  CV_AMD64_XMM0H = 202,
  CV_AMD64_XMM1H = 203,
  CV_AMD64_XMM2H = 204,
  CV_AMD64_XMM3H = 205,
  CV_AMD64_XMM4H = 206,
  CV_AMD64_XMM5H = 207,
  CV_AMD64_XMM6H = 208,
  CV_AMD64_XMM7H = 209,
  CV_AMD64_MXCSR = 211,
  CV_AMD64_EMM0L = 220,
  CV_AMD64_EMM1L = 221,
  CV_AMD64_EMM2L = 222,
  CV_AMD64_EMM3L = 223,
  CV_AMD64_EMM4L = 224,
  CV_AMD64_EMM5L = 225,
  CV_AMD64_EMM6L = 226,
  CV_AMD64_EMM7L = 227,
  CV_AMD64_EMM0H = 228,
  CV_AMD64_EMM1H = 229,
  CV_AMD64_EMM2H = 230,
  CV_AMD64_EMM3H = 231,
  CV_AMD64_EMM4H = 232,
  CV_AMD64_EMM5H = 233,
  CV_AMD64_EMM6H = 234,
  CV_AMD64_EMM7H = 235,
  CV_AMD64_MM00 = 236,
  CV_AMD64_MM01 = 237,
  CV_AMD64_MM10 = 238,
  CV_AMD64_MM11 = 239,
  CV_AMD64_MM20 = 240,
  CV_AMD64_MM21 = 241,
  CV_AMD64_MM30 = 242,
  CV_AMD64_MM31 = 243,
  CV_AMD64_MM40 = 244,
  CV_AMD64_MM41 = 245,
  CV_AMD64_MM50 = 246,
  CV_AMD64_MM51 = 247,
  CV_AMD64_MM60 = 248,
  CV_AMD64_MM61 = 249,
  CV_AMD64_MM70 = 250,
  CV_AMD64_MM71 = 251,
  CV_AMD64_XMM8 = 252,
  CV_AMD64_XMM9 = 253,
  CV_AMD64_XMM10 = 254,
  CV_AMD64_XMM11 = 255,
  CV_AMD64_XMM12 = 256,
  CV_AMD64_XMM13 = 257,
  CV_AMD64_XMM14 = 258,
  CV_AMD64_XMM15 = 259,
  CV_AMD64_XMM8_0 = 260,
  CV_AMD64_XMM8_1 = 261,
  CV_AMD64_XMM8_2 = 262,
  CV_AMD64_XMM8_3 = 263,
  CV_AMD64_XMM9_0 = 264,
  CV_AMD64_XMM9_1 = 265,
  CV_AMD64_XMM9_2 = 266,
  CV_AMD64_XMM9_3 = 267,
  CV_AMD64_XMM10_0 = 268,
  CV_AMD64_XMM10_1 = 269,
  CV_AMD64_XMM10_2 = 270,
  CV_AMD64_XMM10_3 = 271,
  CV_AMD64_XMM11_0 = 272,
  CV_AMD64_XMM11_1 = 273,
  CV_AMD64_XMM11_2 = 274,
  CV_AMD64_XMM11_3 = 275,
  CV_AMD64_XMM12_0 = 276,
  CV_AMD64_XMM12_1 = 277,
  CV_AMD64_XMM12_2 = 278,
  CV_AMD64_XMM12_3 = 279,
  CV_AMD64_XMM13_0 = 280,
  CV_AMD64_XMM13_1 = 281,
  CV_AMD64_XMM13_2 = 282,
  CV_AMD64_XMM13_3 = 283,
  CV_AMD64_XMM14_0 = 284,
  CV_AMD64_XMM14_1 = 285,
  CV_AMD64_XMM14_2 = 286,
  CV_AMD64_XMM14_3 = 287,
  CV_AMD64_XMM15_0 = 288,
  CV_AMD64_XMM15_1 = 289,
  CV_AMD64_XMM15_2 = 290,
  CV_AMD64_XMM15_3 = 291,
  CV_AMD64_XMM8L = 292,
  CV_AMD64_XMM9L = 293,
  CV_AMD64_XMM10L = 294,
  CV_AMD64_XMM11L = 295,
  CV_AMD64_XMM12L = 296,
  CV_AMD64_XMM13L = 297,
  CV_AMD64_XMM14L = 298,
  CV_AMD64_XMM15L = 299,
  CV_AMD64_XMM8H = 300,
  CV_AMD64_XMM9H = 301,
  CV_AMD64_XMM10H = 302,
  CV_AMD64_XMM11H = 303,
  CV_AMD64_XMM12H = 304,
  CV_AMD64_XMM13H = 305,
  CV_AMD64_XMM14H = 306,
  CV_AMD64_XMM15H = 307,
  CV_AMD64_EMM8L = 308,
  CV_AMD64_EMM9L = 309,
  CV_AMD64_EMM10L = 310,
  CV_AMD64_EMM11L = 311,
  CV_AMD64_EMM12L = 312,
  CV_AMD64_EMM13L = 313,
  CV_AMD64_EMM14L = 314,
  CV_AMD64_EMM15L = 315,
  CV_AMD64_EMM8H = 316,
  CV_AMD64_EMM9H = 317,
  CV_AMD64_EMM10H = 318,
  CV_AMD64_EMM11H = 319,
  CV_AMD64_EMM12H = 320,
  CV_AMD64_EMM13H = 321,
  CV_AMD64_EMM14H = 322,
  CV_AMD64_EMM15H = 323,
  CV_AMD64_SIL = 324,
  CV_AMD64_DIL = 325,
  CV_AMD64_BPL = 326,
  CV_AMD64_SPL = 327,
  CV_AMD64_RAX = 328,
  CV_AMD64_RBX = 329,
  CV_AMD64_RCX = 330,
  CV_AMD64_RDX = 331,
  CV_AMD64_RSI = 332,
  CV_AMD64_RDI = 333,
  CV_AMD64_RBP = 334,
  CV_AMD64_RSP = 335,
  CV_AMD64_R8 = 336,
  CV_AMD64_R9 = 337,
  CV_AMD64_R10 = 338,
  CV_AMD64_R11 = 339,
  CV_AMD64_R12 = 340,
  CV_AMD64_R13 = 341,
  CV_AMD64_R14 = 342,
  CV_AMD64_R15 = 343,
  CV_AMD64_R8B = 344,
  CV_AMD64_R9B = 345,
  CV_AMD64_R10B = 346,
  CV_AMD64_R11B = 347,
  CV_AMD64_R12B = 348,
  CV_AMD64_R13B = 349,
  CV_AMD64_R14B = 350,
  CV_AMD64_R15B = 351,
  CV_AMD64_R8W = 352,
  CV_AMD64_R9W = 353,
  CV_AMD64_R10W = 354,
  CV_AMD64_R11W = 355,
  CV_AMD64_R12W = 356,
  CV_AMD64_R13W = 357,
  CV_AMD64_R14W = 358,
  CV_AMD64_R15W = 359,
  CV_AMD64_R8D = 360,
  CV_AMD64_R9D = 361,
  CV_AMD64_R10D = 362,
  CV_AMD64_R11D = 363,
  CV_AMD64_R12D = 364,
  CV_AMD64_R13D = 365,
  CV_AMD64_R14D = 366,
  CV_AMD64_R15D = 367,
  CV_AMD64_YMM0 = 368,
  CV_AMD64_YMM1 = 369,
  CV_AMD64_YMM2 = 370,
  CV_AMD64_YMM3 = 371,
  CV_AMD64_YMM4 = 372,
  CV_AMD64_YMM5 = 373,
  CV_AMD64_YMM6 = 374,
  CV_AMD64_YMM7 = 375,
  CV_AMD64_YMM8 = 376,
  CV_AMD64_YMM9 = 377,
  CV_AMD64_YMM10 = 378,
  CV_AMD64_YMM11 = 379,
  CV_AMD64_YMM12 = 380,
  CV_AMD64_YMM13 = 381,
  CV_AMD64_YMM14 = 382,
  CV_AMD64_YMM15 = 383,
  CV_AMD64_YMM0H = 384,
  CV_AMD64_YMM1H = 385,
  CV_AMD64_YMM2H = 386,
  CV_AMD64_YMM3H = 387,
  CV_AMD64_YMM4H = 388,
  CV_AMD64_YMM5H = 389,
  CV_AMD64_YMM6H = 390,
  CV_AMD64_YMM7H = 391,
  CV_AMD64_YMM8H = 392,
  CV_AMD64_YMM9H = 393,
  CV_AMD64_YMM10H = 394,
  CV_AMD64_YMM11H = 395,
  CV_AMD64_YMM12H = 396,
  CV_AMD64_YMM13H = 397,
  CV_AMD64_YMM14H = 398,
  CV_AMD64_YMM15H = 399,
  CV_AMD64_XMM0IL = 400,
  CV_AMD64_XMM1IL = 401,
  CV_AMD64_XMM2IL = 402,
  CV_AMD64_XMM3IL = 403,
  CV_AMD64_XMM4IL = 404,
  CV_AMD64_XMM5IL = 405,
  CV_AMD64_XMM6IL = 406,
  CV_AMD64_XMM7IL = 407,
  CV_AMD64_XMM8IL = 408,
  CV_AMD64_XMM9IL = 409,
  CV_AMD64_XMM10IL = 410,
  CV_AMD64_XMM11IL = 411,
  CV_AMD64_XMM12IL = 412,
  CV_AMD64_XMM13IL = 413,
  CV_AMD64_XMM14IL = 414,
  CV_AMD64_XMM15IL = 415,
  CV_AMD64_XMM0IH = 416,
  CV_AMD64_XMM1IH = 417,
  CV_AMD64_XMM2IH = 418,
  CV_AMD64_XMM3IH = 419,
  CV_AMD64_XMM4IH = 420,
  CV_AMD64_XMM5IH = 421,
  CV_AMD64_XMM6IH = 422,
  CV_AMD64_XMM7IH = 423,
  CV_AMD64_XMM8IH = 424,
  CV_AMD64_XMM9IH = 425,
  CV_AMD64_XMM10IH = 426,
  CV_AMD64_XMM11IH = 427,
  CV_AMD64_XMM12IH = 428,
  CV_AMD64_XMM13IH = 429,
  CV_AMD64_XMM14IH = 430,
  CV_AMD64_XMM15IH = 431,
  CV_AMD64_YMM0I0 = 432,
  CV_AMD64_YMM0I1 = 433,
  CV_AMD64_YMM0I2 = 434,
  CV_AMD64_YMM0I3 = 435,
  CV_AMD64_YMM1I0 = 436,
  CV_AMD64_YMM1I1 = 437,
  CV_AMD64_YMM1I2 = 438,
  CV_AMD64_YMM1I3 = 439,
  CV_AMD64_YMM2I0 = 440,
  CV_AMD64_YMM2I1 = 441,
  CV_AMD64_YMM2I2 = 442,
  CV_AMD64_YMM2I3 = 443,
  CV_AMD64_YMM3I0 = 444,
  CV_AMD64_YMM3I1 = 445,
  CV_AMD64_YMM3I2 = 446,
  CV_AMD64_YMM3I3 = 447,
  CV_AMD64_YMM4I0 = 448,
  CV_AMD64_YMM4I1 = 449,
  CV_AMD64_YMM4I2 = 450,
  CV_AMD64_YMM4I3 = 451,
  CV_AMD64_YMM5I0 = 452,
  CV_AMD64_YMM5I1 = 453,
  CV_AMD64_YMM5I2 = 454,
  CV_AMD64_YMM5I3 = 455,
  CV_AMD64_YMM6I0 = 456,
  CV_AMD64_YMM6I1 = 457,
  CV_AMD64_YMM6I2 = 458,
  CV_AMD64_YMM6I3 = 459,
  CV_AMD64_YMM7I0 = 460,
  CV_AMD64_YMM7I1 = 461,
  CV_AMD64_YMM7I2 = 462,
  CV_AMD64_YMM7I3 = 463,
  CV_AMD64_YMM8I0 = 464,
  CV_AMD64_YMM8I1 = 465,
  CV_AMD64_YMM8I2 = 466,
  CV_AMD64_YMM8I3 = 467,
  CV_AMD64_YMM9I0 = 468,
  CV_AMD64_YMM9I1 = 469,
  CV_AMD64_YMM9I2 = 470,
  CV_AMD64_YMM9I3 = 471,
  CV_AMD64_YMM10I0 = 472,
  CV_AMD64_YMM10I1 = 473,
  CV_AMD64_YMM10I2 = 474,
  CV_AMD64_YMM10I3 = 475,
  CV_AMD64_YMM11I0 = 476,
  CV_AMD64_YMM11I1 = 477,
  CV_AMD64_YMM11I2 = 478,
  CV_AMD64_YMM11I3 = 479,
  CV_AMD64_YMM12I0 = 480,
  CV_AMD64_YMM12I1 = 481,
  CV_AMD64_YMM12I2 = 482,
  CV_AMD64_YMM12I3 = 483,
  CV_AMD64_YMM13I0 = 484,
  CV_AMD64_YMM13I1 = 485,
  CV_AMD64_YMM13I2 = 486,
  CV_AMD64_YMM13I3 = 487,
  CV_AMD64_YMM14I0 = 488,
  CV_AMD64_YMM14I1 = 489,
  CV_AMD64_YMM14I2 = 490,
  CV_AMD64_YMM14I3 = 491,
  CV_AMD64_YMM15I0 = 492,
  CV_AMD64_YMM15I1 = 493,
  CV_AMD64_YMM15I2 = 494,
  CV_AMD64_YMM15I3 = 495,
  CV_AMD64_YMM0F0 = 496,
  CV_AMD64_YMM0F1 = 497,
  CV_AMD64_YMM0F2 = 498,
  CV_AMD64_YMM0F3 = 499,
  CV_AMD64_YMM0F4 = 500,
  CV_AMD64_YMM0F5 = 501,
  CV_AMD64_YMM0F6 = 502,
  CV_AMD64_YMM0F7 = 503,
  CV_AMD64_YMM1F0 = 504,
  CV_AMD64_YMM1F1 = 505,
  CV_AMD64_YMM1F2 = 506,
  CV_AMD64_YMM1F3 = 507,
  CV_AMD64_YMM1F4 = 508,
  CV_AMD64_YMM1F5 = 509,
  CV_AMD64_YMM1F6 = 510,
  CV_AMD64_YMM1F7 = 511,
  CV_AMD64_YMM2F0 = 512,
  CV_AMD64_YMM2F1 = 513,
  CV_AMD64_YMM2F2 = 514,
  CV_AMD64_YMM2F3 = 515,
  CV_AMD64_YMM2F4 = 516,
  CV_AMD64_YMM2F5 = 517,
  CV_AMD64_YMM2F6 = 518,
  CV_AMD64_YMM2F7 = 519,
  CV_AMD64_YMM3F0 = 520,
  CV_AMD64_YMM3F1 = 521,
  CV_AMD64_YMM3F2 = 522,
  CV_AMD64_YMM3F3 = 523,
  CV_AMD64_YMM3F4 = 524,
  CV_AMD64_YMM3F5 = 525,
  CV_AMD64_YMM3F6 = 526,
  CV_AMD64_YMM3F7 = 527,
  CV_AMD64_YMM4F0 = 528,
  CV_AMD64_YMM4F1 = 529,
  CV_AMD64_YMM4F2 = 530,
  CV_AMD64_YMM4F3 = 531,
  CV_AMD64_YMM4F4 = 532,
  CV_AMD64_YMM4F5 = 533,
  CV_AMD64_YMM4F6 = 534,
  CV_AMD64_YMM4F7 = 535,
  CV_AMD64_YMM5F0 = 536,
  CV_AMD64_YMM5F1 = 537,
  CV_AMD64_YMM5F2 = 538,
  CV_AMD64_YMM5F3 = 539,
  CV_AMD64_YMM5F4 = 540,
  CV_AMD64_YMM5F5 = 541,
  CV_AMD64_YMM5F6 = 542,
  CV_AMD64_YMM5F7 = 543,
  CV_AMD64_YMM6F0 = 544,
  CV_AMD64_YMM6F1 = 545,
  CV_AMD64_YMM6F2 = 546,
  CV_AMD64_YMM6F3 = 547,
  CV_AMD64_YMM6F4 = 548,
  CV_AMD64_YMM6F5 = 549,
  CV_AMD64_YMM6F6 = 550,
  CV_AMD64_YMM6F7 = 551,
  CV_AMD64_YMM7F0 = 552,
  CV_AMD64_YMM7F1 = 553,
  CV_AMD64_YMM7F2 = 554,
  CV_AMD64_YMM7F3 = 555,
  CV_AMD64_YMM7F4 = 556,
  CV_AMD64_YMM7F5 = 557,
  CV_AMD64_YMM7F6 = 558,
  CV_AMD64_YMM7F7 = 559,
  CV_AMD64_YMM8F0 = 560,
  CV_AMD64_YMM8F1 = 561,
  CV_AMD64_YMM8F2 = 562,
  CV_AMD64_YMM8F3 = 563,
  CV_AMD64_YMM8F4 = 564,
  CV_AMD64_YMM8F5 = 565,
  CV_AMD64_YMM8F6 = 566,
  CV_AMD64_YMM8F7 = 567,
  CV_AMD64_YMM9F0 = 568,
  CV_AMD64_YMM9F1 = 569,
  CV_AMD64_YMM9F2 = 570,
  CV_AMD64_YMM9F3 = 571,
  CV_AMD64_YMM9F4 = 572,
  CV_AMD64_YMM9F5 = 573,
  CV_AMD64_YMM9F6 = 574,
  CV_AMD64_YMM9F7 = 575,
  CV_AMD64_YMM10F0 = 576,
  CV_AMD64_YMM10F1 = 577,
  CV_AMD64_YMM10F2 = 578,
  CV_AMD64_YMM10F3 = 579,
  CV_AMD64_YMM10F4 = 580,
  CV_AMD64_YMM10F5 = 581,
  CV_AMD64_YMM10F6 = 582,
  CV_AMD64_YMM10F7 = 583,
  CV_AMD64_YMM11F0 = 584,
  CV_AMD64_YMM11F1 = 585,
  CV_AMD64_YMM11F2 = 586,
  CV_AMD64_YMM11F3 = 587,
  CV_AMD64_YMM11F4 = 588,
  CV_AMD64_YMM11F5 = 589,
  CV_AMD64_YMM11F6 = 590,
  CV_AMD64_YMM11F7 = 591,
  CV_AMD64_YMM12F0 = 592,
  CV_AMD64_YMM12F1 = 593,
  CV_AMD64_YMM12F2 = 594,
  CV_AMD64_YMM12F3 = 595,
  CV_AMD64_YMM12F4 = 596,
  CV_AMD64_YMM12F5 = 597,
  CV_AMD64_YMM12F6 = 598,
  CV_AMD64_YMM12F7 = 599,
  CV_AMD64_YMM13F0 = 600,
  CV_AMD64_YMM13F1 = 601,
  CV_AMD64_YMM13F2 = 602,
  CV_AMD64_YMM13F3 = 603,
  CV_AMD64_YMM13F4 = 604,
  CV_AMD64_YMM13F5 = 605,
  CV_AMD64_YMM13F6 = 606,
  CV_AMD64_YMM13F7 = 607,
  CV_AMD64_YMM14F0 = 608,
  CV_AMD64_YMM14F1 = 609,
  CV_AMD64_YMM14F2 = 610,
  CV_AMD64_YMM14F3 = 611,
  CV_AMD64_YMM14F4 = 612,
  CV_AMD64_YMM14F5 = 613,
  CV_AMD64_YMM14F6 = 614,
  CV_AMD64_YMM14F7 = 615,
  CV_AMD64_YMM15F0 = 616,
  CV_AMD64_YMM15F1 = 617,
  CV_AMD64_YMM15F2 = 618,
  CV_AMD64_YMM15F3 = 619,
  CV_AMD64_YMM15F4 = 620,
  CV_AMD64_YMM15F5 = 621,
  CV_AMD64_YMM15F6 = 622,
  CV_AMD64_YMM15F7 = 623,
  CV_AMD64_YMM0D0 = 624,
  CV_AMD64_YMM0D1 = 625,
  CV_AMD64_YMM0D2 = 626,
  CV_AMD64_YMM0D3 = 627,
  CV_AMD64_YMM1D0 = 628,
  CV_AMD64_YMM1D1 = 629,
  CV_AMD64_YMM1D2 = 630,
  CV_AMD64_YMM1D3 = 631,
  CV_AMD64_YMM2D0 = 632,
  CV_AMD64_YMM2D1 = 633,
  CV_AMD64_YMM2D2 = 634,
  CV_AMD64_YMM2D3 = 635,
  CV_AMD64_YMM3D0 = 636,
  CV_AMD64_YMM3D1 = 637,
  CV_AMD64_YMM3D2 = 638,
  CV_AMD64_YMM3D3 = 639,
  CV_AMD64_YMM4D0 = 640,
  CV_AMD64_YMM4D1 = 641,
  CV_AMD64_YMM4D2 = 642,
  CV_AMD64_YMM4D3 = 643,
  CV_AMD64_YMM5D0 = 644,
  CV_AMD64_YMM5D1 = 645,
  CV_AMD64_YMM5D2 = 646,
  CV_AMD64_YMM5D3 = 647,
  CV_AMD64_YMM6D0 = 648,
  CV_AMD64_YMM6D1 = 649,
  CV_AMD64_YMM6D2 = 650,
  CV_AMD64_YMM6D3 = 651,
  CV_AMD64_YMM7D0 = 652,
  CV_AMD64_YMM7D1 = 653,
  CV_AMD64_YMM7D2 = 654,
  CV_AMD64_YMM7D3 = 655,
  CV_AMD64_YMM8D0 = 656,
  CV_AMD64_YMM8D1 = 657,
  CV_AMD64_YMM8D2 = 658,
  CV_AMD64_YMM8D3 = 659,
  CV_AMD64_YMM9D0 = 660,
  CV_AMD64_YMM9D1 = 661,
  CV_AMD64_YMM9D2 = 662,
  CV_AMD64_YMM9D3 = 663,
  CV_AMD64_YMM10D0 = 664,
  CV_AMD64_YMM10D1 = 665,
  CV_AMD64_YMM10D2 = 666,
  CV_AMD64_YMM10D3 = 667,
  CV_AMD64_YMM11D0 = 668,
  CV_AMD64_YMM11D1 = 669,
  CV_AMD64_YMM11D2 = 670,
  CV_AMD64_YMM11D3 = 671,
  CV_AMD64_YMM12D0 = 672,
  CV_AMD64_YMM12D1 = 673,
  CV_AMD64_YMM12D2 = 674,
  CV_AMD64_YMM12D3 = 675,
  CV_AMD64_YMM13D0 = 676,
  CV_AMD64_YMM13D1 = 677,
  CV_AMD64_YMM13D2 = 678,
  CV_AMD64_YMM13D3 = 679,
  CV_AMD64_YMM14D0 = 680,
  CV_AMD64_YMM14D1 = 681,
  CV_AMD64_YMM14D2 = 682,
  CV_AMD64_YMM14D3 = 683,
  CV_AMD64_YMM15D0 = 684,
  CV_AMD64_YMM15D1 = 685,
  CV_AMD64_YMM15D2 = 686,
  CV_AMD64_YMM15D3 = 687
};
+
 #endif
-- 
2.26.2

Reply via email to