diff --git a/JsonToolsNppPlugin/Forms/TreeViewer.cs b/JsonToolsNppPlugin/Forms/TreeViewer.cs
index 515e648..9af736b 100644
--- a/JsonToolsNppPlugin/Forms/TreeViewer.cs
+++ b/JsonToolsNppPlugin/Forms/TreeViewer.cs
@@ -8,8 +8,10 @@
using System.Windows.Forms;
using System.Collections.Generic;
using System.Linq;
+using Kbg.NppPluginNET.PluginInfrastructure;
+using NppPluginNET.PluginInfrastructure;
using JSON_Viewer.JSONViewer;
-using JSON_Viewer.Infrastructure;
+using JSON_Viewer.Tools;
namespace JSON_Viewer.Forms
{
diff --git a/JsonToolsNppPlugin/JSONTools/JNode.cs b/JsonToolsNppPlugin/JSONTools/JNode.cs
index 67d07ff..aa33cfb 100644
--- a/JsonToolsNppPlugin/JSONTools/JNode.cs
+++ b/JsonToolsNppPlugin/JSONTools/JNode.cs
@@ -7,8 +7,19 @@
using System.Text.RegularExpressions;
using System.Collections.Generic; // for dictionary, list
-namespace JSON_Viewer.JSONViewer
+namespace JSON_Tools.JSON_Tools
{
+ public struct Str_Line
+ {
+ public string str;
+ public int line;
+
+ public Str_Line(string str, int line)
+ {
+ this.str = str;
+ this.line = line;
+ }
+ }
///
/// JNode type indicator
///
@@ -87,12 +98,12 @@ public enum Dtype : ushort
///
public class JNode : IComparable
{
- public IComparable? value; // null for arrays and objects
+ public IComparable value; // null for arrays and objects
// IComparable is good here because we want easy comparison of JNodes
public Dtype type;
public int line_num;
- public JNode(IComparable? value,
+ public JNode(IComparable value,
Dtype type,
int line_num)
{
@@ -217,7 +228,7 @@ public virtual string PrettyPrint(int indent = 4, bool sort_keys = true, int dep
///
public virtual string ToStringAndChangeLineNumbers(bool sort_keys = true, int? cur_line_num = null)
{
- if (cur_line_num != null) { line_num = cur_line_num.Value; }
+ if (cur_line_num != null) { line_num = (int)cur_line_num; }
return ToString();
}
@@ -238,7 +249,7 @@ public virtual string ToStringAndChangeLineNumbers(bool sort_keys = true, int? c
///
public virtual string PrettyPrintAndChangeLineNumbers(int indent = 4, bool sort_keys = true, int depth = 0, int? cur_line_num = null)
{
- if (cur_line_num != null) { line_num = cur_line_num.Value; }
+ if (cur_line_num != null) { line_num = (int)cur_line_num; }
return ToString();
}
@@ -254,10 +265,10 @@ public virtual string PrettyPrintAndChangeLineNumbers(int indent = 4, bool sort_
///
///
///
- public virtual (string str, int curline) PrettyPrintChangeLinesHelper(int indent, bool sort_keys, int depth, int curline)
+ public virtual Str_Line PrettyPrintChangeLinesHelper(int indent, bool sort_keys, int depth, int curline)
{
line_num = curline;
- return (ToString(), curline);
+ return new Str_Line(ToString(), curline);
}
/////
@@ -282,7 +293,7 @@ public virtual (string str, int curline) PrettyPrintChangeLinesHelper(int indent
///
/// If an attempt is made to compare two things of different type.
///
- public int CompareTo(object? other)
+ public int CompareTo(object other)
{
if (other is JNode)
{
@@ -401,7 +412,7 @@ public override string PrettyPrint(int indent = 4, bool sort_keys = true, int de
///
public override string ToStringAndChangeLineNumbers(bool sort_keys = true, int? cur_line_num = null)
{
- int curline = (cur_line_num == null) ? line_num : cur_line_num.Value;
+ int curline = (cur_line_num == null) ? line_num : (int)cur_line_num;
line_num = curline;
var sb = new StringBuilder();
sb.Append('{');
@@ -430,39 +441,38 @@ public override string PrettyPrintAndChangeLineNumbers(int indent = 4, bool sort
{
// the cur_line_num is based off of the root node, whichever node originally called
// PrettyPrintAndChangeLineNumbers. If this is the root node, everything else's line number is based on this one's.
- int curline = (cur_line_num == null) ? line_num : cur_line_num.Value;
- (string str, _) = PrettyPrintChangeLinesHelper(indent, sort_keys, depth, curline);
- return str;
+ int curline = (cur_line_num == null) ? line_num : (int)cur_line_num;
+ return PrettyPrintChangeLinesHelper(indent, sort_keys, depth, curline).str;
}
///
- public override (string str, int curline) PrettyPrintChangeLinesHelper(int indent, bool sort_keys, int depth, int curline)
+ public override Str_Line PrettyPrintChangeLinesHelper(int indent, bool sort_keys, int depth, int curline)
{
line_num = curline;
string dent = new string(' ', indent * depth);
var sb = new StringBuilder();
sb.Append($"{dent}{{{Environment.NewLine}");
int ctr = 0;
- string vstr;
string[] keys = children.Keys.ToArray();
if (sort_keys) Array.Sort(keys, (x, y) => x.ToLower().CompareTo(y.ToLower()));
foreach (string k in keys)
{
JNode v = children[k];
+ Str_Line sline;
if (v is JObject || v is JArray)
{
- (vstr, curline) = v.PrettyPrintChangeLinesHelper(indent, sort_keys, depth + 1, curline + 2);
- sb.Append($"{dent}\"{k}\":{Environment.NewLine}{vstr}");
+ sline = v.PrettyPrintChangeLinesHelper(indent, sort_keys, depth + 1, curline + 2);
+ sb.Append($"{dent}\"{k}\":{Environment.NewLine}{sline.str}");
}
else
{
- (vstr, curline) = v.PrettyPrintChangeLinesHelper(indent, sort_keys, depth + 1, curline + 1);
- sb.Append($"{dent}\"{k}\": {vstr}");
+ sline = v.PrettyPrintChangeLinesHelper(indent, sort_keys, depth + 1, curline + 1);
+ sb.Append($"{dent}\"{k}\": {sline.str}");
}
sb.Append((++ctr == children.Count) ? Environment.NewLine : "," + Environment.NewLine);
}
sb.Append($"{dent}}}");
- return (sb.ToString(), curline + 1);
+ return new Str_Line(sb.ToString(), curline + 1);
}
/////
@@ -492,7 +502,7 @@ public override bool Equals(JNode other)
foreach (string key in children.Keys)
{
JNode val = children[key];
- bool other_haskey = othobj.children.TryGetValue(key, out JNode? valobj);
+ bool other_haskey = othobj.children.TryGetValue(key, out JNode valobj);
if (!other_haskey || !val.Equals(valobj))
{
return false;
@@ -562,7 +572,7 @@ public override string PrettyPrint(int indent = 4, bool sort_keys = true, int de
///
public override string ToStringAndChangeLineNumbers(bool sort_keys = true, int? cur_line_num = null)
{
- int curline = (cur_line_num == null) ? line_num : cur_line_num.Value;
+ int curline = (cur_line_num == null) ? line_num : (int)cur_line_num;
line_num = curline;
var sb = new StringBuilder();
sb.Append('[');
@@ -588,37 +598,35 @@ public override string PrettyPrintAndChangeLineNumbers(int indent = 4, bool sort
{
// the cur_line_num is based off of the root node, whichever node originally called
// PrettyPrintAndChangeLineNumbers. If this is the root node, everything else's line number is based on this one's.
- int curline = (cur_line_num == null) ? line_num : cur_line_num.Value;
- (string str, _) = PrettyPrintChangeLinesHelper(indent, sort_keys, depth, curline);
- return str;
+ int curline = (cur_line_num == null) ? line_num : (int)cur_line_num;
+ return PrettyPrintChangeLinesHelper(indent, sort_keys, depth, curline).str;
}
///
- public override (string str, int curline) PrettyPrintChangeLinesHelper(int indent, bool sort_keys, int depth, int curline)
+ public override Str_Line PrettyPrintChangeLinesHelper(int indent, bool sort_keys, int depth, int curline)
{
-
line_num = curline;
string dent = new string(' ', indent * depth);
var sb = new StringBuilder();
sb.Append($"{dent}[" + Environment.NewLine);
int ctr = 0;
- string vstr;
foreach (JNode v in children)
{
- (vstr, curline) = v.PrettyPrintChangeLinesHelper(indent, sort_keys, depth + 1, ++curline);
+ Str_Line sline = v.PrettyPrintChangeLinesHelper(indent, sort_keys, depth + 1, ++curline);
+ curline = sline.line;
// this child's string could be multiple lines, so we need to know what the final line of its string was.
if (v is JObject || v is JArray)
{
- sb.Append(vstr);
+ sb.Append(sline.str);
}
else
{
- sb.Append($"{dent}{vstr}");
+ sb.Append($"{dent}{sline.str}");
}
sb.Append((++ctr == children.Count) ? Environment.NewLine : "," + Environment.NewLine);
}
sb.Append($"{dent}]");
- return (sb.ToString(), curline + 1);
+ return new Str_Line(sb.ToString(), curline + 1);
}
/////
diff --git a/JsonToolsNppPlugin/JSONTools/JsonGrepper.cs b/JsonToolsNppPlugin/JSONTools/JsonGrepper.cs
index 9d30f60..9adc086 100644
--- a/JsonToolsNppPlugin/JSONTools/JsonGrepper.cs
+++ b/JsonToolsNppPlugin/JSONTools/JsonGrepper.cs
@@ -3,10 +3,10 @@
using System.IO;
using System.Linq;
using System.Threading.Tasks;
-using System.Net.Http.Headers;
-using System.Net.Http;
+using System.Web;
+using JSON_Tools.Utils;
-namespace JSON_Viewer.JSONViewer
+namespace JSON_Tools.JSON_Tools
{
///
/// Reads JSON files based on search patterns, and also fetches JSON from APIS.
@@ -14,10 +14,6 @@ namespace JSON_Viewer.JSONViewer
///
public class JsonGrepper
{
- ///
- /// used to send HTTP GET requests to APIs to fetch JSON
- ///
- private static readonly HttpClient client = new HttpClient();
///
/// maps filenames and urls to parsed JSON
///
@@ -25,7 +21,7 @@ public class JsonGrepper
public JsonParser json_parser;
public Dictionary fname_lints;
- public JsonGrepper(JsonParser? json_parser = null)
+ public JsonGrepper(JsonParser json_parser = null)
{
fname_jsons = new JObject(0, new Dictionary());
if (json_parser == null)
@@ -84,72 +80,72 @@ public void ReadJsonFiles(string root_dir, bool recursive = false, string search
///
///
///
- public async Task GetJsonStringFromApiAsync(string url)
- {
- // below is an example of how to set headers
- // see https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers#request_context
- client.DefaultRequestHeaders.Accept.Clear();
- // the below Accept header is necessary to specify what kind of media response you will accept
- client.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json"));
- // the User-Agent header tells the API who you are
- client.DefaultRequestHeaders.Add("User-Agent", "JSON viewer API request tool");
- try
- {
- Task stringTask = client.GetStringAsync(url);
- string? json = await stringTask;
- return json;
- }
- catch (Exception ex)
- {
- return ex.ToString();
- }
- }
+ //public async Task GetJsonStringFromApiAsync(string url)
+ //{
+ // // below is an example of how to set headers
+ // // see https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers#request_context
+ // client.DefaultRequestHeaders.Accept.Clear();
+ // // the below Accept header is necessary to specify what kind of media response you will accept
+ // client.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json"));
+ // // the User-Agent header tells the API who you are
+ // client.DefaultRequestHeaders.Add("User-Agent", "JSON viewer API request tool");
+ // try
+ // {
+ // Task stringTask = client.GetStringAsync(url);
+ // string json = await stringTask;
+ // return json;
+ // }
+ // catch (Exception ex)
+ // {
+ // return ex.ToString();
+ // }
+ //}
- ///
- /// Asynchronously send API requests to several URLs.
- /// For each URL where the request succeeds, try to parse the JSON returned.
- /// If the JSON returned is valid, add the JSON to fname_jsons.
- /// Return a list of all URLs for which the request succeeded,
- /// a dict mapping urls to exception strings,
- /// and a dict mapping urls to JsonLint arrays (lists of syntax errors in JSON)
- ///
- ///
- ///
- public async Task<(HashSet urls_requested, Dictionary exceptions)> GetJsonFromAllUrls(string[] urls)
- {
- var json_tasks = new Dictionary>();
- foreach (string url in urls)
- {
- // if (!fname_jsons.children.ContainsKey(url))
- //// it is probably better to allow duplication of labor, so that the user can get new JSON
- //// if something changed
- json_tasks[url] = GetJsonStringFromApiAsync(url);
- }
- string[] jsons = await Task.WhenAll(json_tasks.Values);
- // keep track of which urls were actually requested, excluding the ones where the request failed
- var urls_requested = new HashSet();
- var exceptions = new Dictionary();
- for (int ii = 0; ii < urls.Length; ii++)
- {
- string url = urls[ii];
- string json = jsons[ii];
- try
- {
- fname_jsons.children[url] = json_parser.Parse(json);
- if (json_parser.lint != null && json_parser.lint.Count > 0)
- {
- fname_lints[url] = json_parser.lint.LazySlice(":").ToArray();
- }
- urls_requested.Add(url);
- }
- catch (Exception ex)
- {
- // GetJsonStringFromApiAsync returns the exception message if the request failed.
- exceptions[url] = json;
- }
- }
- return (urls_requested, exceptions);
- }
+ /////
+ ///// Asynchronously send API requests to several URLs.
+ ///// For each URL where the request succeeds, try to parse the JSON returned.
+ ///// If the JSON returned is valid, add the JSON to fname_jsons.
+ ///// Return a list of all URLs for which the request succeeded,
+ ///// a dict mapping urls to exception strings,
+ ///// and a dict mapping urls to JsonLint arrays (lists of syntax errors in JSON)
+ /////
+ /////
+ /////
+ //public async Task<(HashSet urls_requested, Dictionary exceptions)> GetJsonFromAllUrls(string[] urls)
+ //{
+ // var json_tasks = new Dictionary>();
+ // foreach (string url in urls)
+ // {
+ // // if (!fname_jsons.children.ContainsKey(url))
+ // //// it is probably better to allow duplication of labor, so that the user can get new JSON
+ // //// if something changed
+ // json_tasks[url] = GetJsonStringFromApiAsync(url);
+ // }
+ // string[] jsons = await Task.WhenAll(json_tasks.Values);
+ // // keep track of which urls were actually requested, excluding the ones where the request failed
+ // var urls_requested = new HashSet();
+ // var exceptions = new Dictionary();
+ // for (int ii = 0; ii < urls.Length; ii++)
+ // {
+ // string url = urls[ii];
+ // string json = jsons[ii];
+ // try
+ // {
+ // fname_jsons.children[url] = json_parser.Parse(json);
+ // if (json_parser.lint != null && json_parser.lint.Count > 0)
+ // {
+ // fname_lints[url] = json_parser.lint.LazySlice(":").ToArray();
+ // }
+ // urls_requested.Add(url);
+ // }
+ // catch (Exception ex)
+ // {
+ // // GetJsonStringFromApiAsync returns the exception message if the request failed.
+ // exceptions[url] = json;
+ // }
+ // }
+ // return (urls_requested, exceptions);
+ //}
///
/// clear the map from filenames to JSON objects, and get rid of any lint
@@ -161,91 +157,91 @@ public void Reset()
}
}
- public class JsonGrepperTester
- {
- public static DirectoryInfo smalldir = new DirectoryInfo(@"C:\Users\mjols\Documents\csharp\JSON_Viewer_cmd\testfiles\small");
- public static JsonParser jparser = new JsonParser();
- public static RemesParser rparser = new RemesParser();
- private static JsonGrepper grepper = new JsonGrepper(new JsonParser());
+ //public class JsonGrepperTester
+ // {
+ // public static DirectoryInfo smalldir = new DirectoryInfo(@"C:\Users\mjols\Documents\csharp\JSON_Viewer_cmd\testfiles\small");
+ // public static JsonParser jparser = new JsonParser();
+ // public static RemesParser rparser = new RemesParser();
+ // private static JsonGrepper grepper = new JsonGrepper(new JsonParser());
- public static void TestFnames()
- {
- JObject all_jsons = new JObject(0, new Dictionary());
- foreach (FileInfo f in smalldir.GetFiles())
- {
- string jsontxt = File.ReadAllText(f.FullName);
- try
- {
- all_jsons.children[f.FullName] = jparser.Parse(jsontxt);
- }
- catch { }
- }
- DirectoryInfo subdir = new DirectoryInfo($"{smalldir.FullName}\\subsmall");
- foreach (FileInfo f in subdir.GetFiles())
- {
- string jsontxt = File.ReadAllText(f.FullName);
- try
- {
- all_jsons.children[f.FullName] = jparser.Parse(jsontxt);
- }
- catch { }
- }
- var testcases = new (string search_pattern, bool recursive, JNode desired_files)[]
- {
- ("*.json", false, rparser.Search("keys(@)[@ =~ `.json$` & not(@ =~ `subsmall`)]", all_jsons)), // fnames not containing submall but ending in .json
- ("*.ipynb", false, rparser.Search("keys(@)[@ =~ `ipynb$` & not(@ =~ `subsmall`)]", all_jsons)), // fnames not containing subsmall but ending in .ipynb
- ("*.json", true, rparser.Search("keys(@)[@ =~ `json$`]", all_jsons)), // fnames ending in .json
- ("*.txt", true, rparser.Search("keys(@)[@ =~ `txt$`]", all_jsons)), // fnames ending in .txt
- };
- // test string slicer
- int tests_failed = 0;
- int ii = 0;
- foreach ((string search_pattern, bool recursive, JNode desired_files) in testcases)
- {
- grepper.Reset();
- grepper.ReadJsonFiles(smalldir.FullName, recursive, search_pattern);
- JNode found_files = rparser.Search("keys(@)", grepper.fname_jsons);
- ((JArray)found_files).children.Sort();
- ((JArray)desired_files).children.Sort();
- if (found_files.ToString() != desired_files.ToString())
- {
- tests_failed++;
- Console.WriteLine(String.Format("Test {0} (grepper.ReadJsonFiles({1}, {2}, {3})) failed:\n" +
- "Expected to find files\n{4}\nGot files\n{5}",
- ii + 1, subdir.FullName, recursive, search_pattern, desired_files.PrettyPrint(), found_files.PrettyPrint()));
- }
- ii++;
- }
+ // public static void TestFnames()
+ // {
+ // JObject all_jsons = new JObject(0, new Dictionary());
+ // foreach (FileInfo f in smalldir.GetFiles())
+ // {
+ // string jsontxt = File.ReadAllText(f.FullName);
+ // try
+ // {
+ // all_jsons.children[f.FullName] = jparser.Parse(jsontxt);
+ // }
+ // catch { }
+ // }
+ // DirectoryInfo subdir = new DirectoryInfo($"{smalldir.FullName}\\subsmall");
+ // foreach (FileInfo f in subdir.GetFiles())
+ // {
+ // string jsontxt = File.ReadAllText(f.FullName);
+ // try
+ // {
+ // all_jsons.children[f.FullName] = jparser.Parse(jsontxt);
+ // }
+ // catch { }
+ // }
+ // var testcases = new (string search_pattern, bool recursive, JNode desired_files)[]
+ // {
+ // ("*.json", false, rparser.Search("keys(@)[@ =~ `.json$` & not(@ =~ `subsmall`)]", all_jsons)), // fnames not containing submall but ending in .json
+ // ("*.ipynb", false, rparser.Search("keys(@)[@ =~ `ipynb$` & not(@ =~ `subsmall`)]", all_jsons)), // fnames not containing subsmall but ending in .ipynb
+ // ("*.json", true, rparser.Search("keys(@)[@ =~ `json$`]", all_jsons)), // fnames ending in .json
+ // ("*.txt", true, rparser.Search("keys(@)[@ =~ `txt$`]", all_jsons)), // fnames ending in .txt
+ // };
+ // // test string slicer
+ // int tests_failed = 0;
+ // int ii = 0;
+ // foreach ((string search_pattern, bool recursive, JNode desired_files) in testcases)
+ // {
+ // grepper.Reset();
+ // grepper.ReadJsonFiles(smalldir.FullName, recursive, search_pattern);
+ // JNode found_files = rparser.Search("keys(@)", grepper.fname_jsons);
+ // ((JArray)found_files).children.Sort();
+ // ((JArray)desired_files).children.Sort();
+ // if (found_files.ToString() != desired_files.ToString())
+ // {
+ // tests_failed++;
+ // Console.WriteLine(String.Format("Test {0} (grepper.ReadJsonFiles({1}, {2}, {3})) failed:\n" +
+ // "Expected to find files\n{4}\nGot files\n{5}",
+ // ii + 1, subdir.FullName, recursive, search_pattern, desired_files.PrettyPrint(), found_files.PrettyPrint()));
+ // }
+ // ii++;
+ // }
- // test nonstandard JsonParser settings for the grepper
- grepper.json_parser.allow_javascript_comments = true;
- grepper.json_parser.allow_singlequoted_str = true;
- string json_subdir_name = subdir.FullName.Replace("\\", "\\\\");
+ // // test nonstandard JsonParser settings for the grepper
+ // grepper.json_parser.allow_javascript_comments = true;
+ // grepper.json_parser.allow_singlequoted_str = true;
+ // string json_subdir_name = subdir.FullName.Replace("\\", "\\\\");
- var special_testcases = new (string search_pattern, JNode desired_files)[]
- {
- ("*comment*.txt", jparser.Parse($"[\"{json_subdir_name}\\\\comment_json_as_txt.txt\"]")),
- ("*singlequote*.txt", jparser.Parse($"[\"{json_subdir_name}\\\\singlequote_json_as_txt.txt\"]")),
- };
- foreach ((string search_pattern, JNode desired_files) in special_testcases)
- {
- grepper.Reset();
- grepper.ReadJsonFiles(subdir.FullName, false, search_pattern);
- JNode found_files = rparser.Search("keys(@)", grepper.fname_jsons);
- ((JArray)found_files).children.Sort();
- ((JArray)desired_files).children.Sort();
- if (found_files.ToString() != desired_files.ToString())
- {
- tests_failed++;
- Console.WriteLine(String.Format("Test {0} (grepper.ReadJsonFiles({1}, {2}, {3})) failed:\n" +
- "Expected to find files\n{4}\nGot files\n{5}",
- ii + 1, subdir.FullName, false, search_pattern, desired_files.PrettyPrint(), found_files.PrettyPrint()));
- }
- ii++;
- }
+ // var special_testcases = new (string search_pattern, JNode desired_files)[]
+ // {
+ // ("*comment*.txt", jparser.Parse($"[\"{json_subdir_name}\\\\comment_json_as_txt.txt\"]")),
+ // ("*singlequote*.txt", jparser.Parse($"[\"{json_subdir_name}\\\\singlequote_json_as_txt.txt\"]")),
+ // };
+ // foreach ((string search_pattern, JNode desired_files) in special_testcases)
+ // {
+ // grepper.Reset();
+ // grepper.ReadJsonFiles(subdir.FullName, false, search_pattern);
+ // JNode found_files = rparser.Search("keys(@)", grepper.fname_jsons);
+ // ((JArray)found_files).children.Sort();
+ // ((JArray)desired_files).children.Sort();
+ // if (found_files.ToString() != desired_files.ToString())
+ // {
+ // tests_failed++;
+ // Console.WriteLine(String.Format("Test {0} (grepper.ReadJsonFiles({1}, {2}, {3})) failed:\n" +
+ // "Expected to find files\n{4}\nGot files\n{5}",
+ // ii + 1, subdir.FullName, false, search_pattern, desired_files.PrettyPrint(), found_files.PrettyPrint()));
+ // }
+ // ii++;
+ // }
- Console.WriteLine($"Failed {tests_failed} tests.");
- Console.WriteLine($"Passed {ii - tests_failed} tests.");
- }
- }
+ // Console.WriteLine($"Failed {tests_failed} tests.");
+ // Console.WriteLine($"Passed {ii - tests_failed} tests.");
+ // }
+ //}
}
diff --git a/JsonToolsNppPlugin/JSONTools/JsonParser.cs b/JsonToolsNppPlugin/JSONTools/JsonParser.cs
index 9a1da75..4d54be6 100644
--- a/JsonToolsNppPlugin/JSONTools/JsonParser.cs
+++ b/JsonToolsNppPlugin/JSONTools/JsonParser.cs
@@ -6,7 +6,7 @@
using System.Text;
using System.Text.RegularExpressions;
-namespace JSON_Viewer.JSONViewer
+namespace JSON_Tools.JSON_Tools
{
///
/// An exception thrown when the parser encounters syntactically invalid JSON.
@@ -96,11 +96,16 @@ public class JsonParser
///
/// If "linting" is true, most forms of invalid syntax will not cause the parser to stop, but instead the syntax error will be recorded in a list.
///
- public List? lint = null;
+ public List lint = null;
//public Dictionary escape_map
// no customization for date culture will be available - dates will be recognized as yyyy-mm-dd
// and datetimes will be recognized as YYYY-MM-DDThh:mm:ss.sssZ (the Z at the end indicates that it's UTC)
// see https://stackoverflow.com/questions/10286204/what-is-the-right-json-date-format
+ ///
+ /// position in JSON string
+ ///
+ public int ii = 0;
+ public int line_num = 0;
public JsonParser(bool allow_datetimes = false,
bool allow_singlequoted_str = false,
@@ -126,23 +131,22 @@ public JsonParser(bool allow_datetimes = false,
// return (c == ' ' || c == '\t' || c == '\n' || c == '\r');
//}
- private static (int pos, int line_num) ConsumeWhiteSpace(string q, int ii, int line_num)
+ private void ConsumeWhiteSpace(string inp)
{
char c;
- while (ii < q.Length - 1)
+ while (ii < inp.Length - 1)
{
- c = q[ii];
+ c = inp[ii];
// tried using if/else if, but it's slower
if (c == ' ' || c == '\t' || c == '\r') { ii++; }
else if (c == '\n') { ii++; line_num++; }
- else { break; }
+ else { return; }
}
- return (ii, line_num);
}
- private static (int ii, int line_num) ConsumeComment(string inp, int ii, int line_num)
+ private void ConsumeComment(string inp)
{
- char cur_c = inp[ii];
+ char cur_c;
char next_c;
while (ii < inp.Length - 1 && inp[ii] == '/')
{
@@ -155,8 +159,8 @@ private static (int ii, int line_num) ConsumeComment(string inp, int ii, int lin
cur_c = inp[ii++];
if (cur_c == '\n')
{
- (ii, line_num) = ConsumeWhiteSpace(inp, ii, line_num + 1);
- break;
+ ConsumeWhiteSpace(inp);
+ return;
}
}
}
@@ -174,7 +178,7 @@ private static (int ii, int line_num) ConsumeComment(string inp, int ii, int lin
{
comment_ended = true;
ii += 1;
- break;
+ return;
}
}
}
@@ -183,9 +187,21 @@ private static (int ii, int line_num) ConsumeComment(string inp, int ii, int lin
throw new JsonParserException("Unterminated multi-line comment", inp[ii], ii);
}
}
- (ii, line_num) = ConsumeWhiteSpace(inp, ii, line_num);
+ ConsumeWhiteSpace(inp);
+ }
+ }
+
+ private void MaybeConsumeComment(string inp)
+ {
+ if (allow_javascript_comments || lint != null)
+ {
+ if (lint != null)
+ {
+ lint.Add(new JsonLint("JavaScript comments are not part of the original JSON specification", ii, line_num, '/'));
+ }
+ ConsumeComment(inp);
}
- return (ii, line_num);
+ throw new JsonParserException("JavaScript comments are not part of the original JSON specification", inp[ii], ii);
}
///
@@ -194,15 +210,11 @@ private static (int ii, int line_num) ConsumeComment(string inp, int ii, int lin
/// or if `index` is less than `length` from the end of `inp`.
///
///
- private static (int, int) ParseHexadecimal(string inp, int index, int length)
+ private int ParseHexadecimal(string inp, int length)
{
var sb = new StringBuilder();
- for (int ii = index; ii < index + length; ii++)
+ for ( ; ii < ii + length && ii < inp.Length; ii++)
{
- if (ii == inp.Length)
- {
- throw new JsonParserException("Could not find hexadecimal of length " + length, inp[ii], ii);
- }
sb.Append(inp[ii]);
}
string s = sb.ToString();
@@ -211,13 +223,14 @@ private static (int, int) ParseHexadecimal(string inp, int index, int length)
try
{
charval = int.Parse(s, System.Globalization.NumberStyles.HexNumber);
- return (charval, index + length - 1);
+ ii += length - 1;
+ return charval;
// the -1 is because ParseString increments by 1 after every escaped sequence anyway
}
catch
{
throw new JsonParserException("Could not find valid hexadecimal of length " + length,
- inp[index + length], index + length);
+ inp[ii + length], ii + length);
}
}
@@ -280,17 +293,17 @@ private static JNode TryParseDateOrDateTime(string maybe_datetime, int line_num)
/// a JNode of type Dtype.STR, and the position of the end of the string literal
///
///
- public (JNode str, int pos, int line_num) ParseString(string inp, int startpos, int line_num, char quote_char = '"')
+ public JNode ParseString(string inp, char quote_char = '"')
{
- int ii = startpos + 1;
- char cur_c = inp[ii];
+ ii++;
+ char cur_c;
StringBuilder sb = new StringBuilder();
while (true)
{
if (ii == inp.Length)
{
- if (lint == null) throw new JsonParserException("Unterminated string literal", inp[startpos], startpos);
- lint.Add(new JsonLint($"Unterminated string literal starting at position {startpos}", startpos, line_num, inp[startpos]));
+ if (lint == null) throw new JsonParserException("Unterminated string literal", inp[ii], ii);
+ lint.Add(new JsonLint($"Unterminated string literal starting at position {ii}", ii, line_num, inp[ii]));
break;
}
cur_c = inp[ii];
@@ -299,7 +312,7 @@ private static JNode TryParseDateOrDateTime(string maybe_datetime, int line_num)
// internal newlines are not allowed in JSON strings
if (lint == null) throw new JsonParserException("Unterminated string literal", cur_c, ii);
line_num++;
- lint.Add(new JsonLint($"String literal starting at position {startpos} contains newline", startpos, line_num, inp[startpos]));
+ lint.Add(new JsonLint($"String literal starting at position {ii} contains newline", ii, line_num, inp[ii]));
}
if (cur_c == quote_char)
{
@@ -310,7 +323,7 @@ private static JNode TryParseDateOrDateTime(string maybe_datetime, int line_num)
if (ii == inp.Length - 2)
{
if (lint == null) throw new JsonParserException("Unterminated string literal", cur_c, ii);
- lint.Add(new JsonLint($"Unterminated string literal starting at position {startpos}", startpos, line_num, inp[startpos]));
+ lint.Add(new JsonLint($"Unterminated string literal starting at position {ii}", ii, line_num, inp[ii]));
break;
}
char next_char = inp[ii + 1];
@@ -333,7 +346,8 @@ private static JNode TryParseDateOrDateTime(string maybe_datetime, int line_num)
// \x and \U escapes are not part of the JSON standard
try
{
- (next_hex, ii) = ParseHexadecimal(inp, ii + 2, 4);
+ ii += 2;
+ next_hex = ParseHexadecimal(inp, 4);
sb.Append((char)next_hex);
}
catch (Exception e)
@@ -361,11 +375,12 @@ private static JNode TryParseDateOrDateTime(string maybe_datetime, int line_num)
}
ii++;
}
+ ii++;
if (allow_datetimes)
{
- return (TryParseDateOrDateTime(sb.ToString(), line_num), ii + 1, line_num);
+ return TryParseDateOrDateTime(sb.ToString(), line_num);
}
- return (new JNode(sb.ToString(), Dtype.STR, line_num), ii + 1, line_num);
+ return new JNode(sb.ToString(), Dtype.STR, line_num);
}
//public static readonly Regex num_regex = new Regex(@"(-?(?:0|[1-9]\d*))(\.\d+)?([eE][-+]?\d+)?");
@@ -377,14 +392,14 @@ private static JNode TryParseDateOrDateTime(string maybe_datetime, int line_num)
/// The starting line number
/// a JNode with type = Dtype.INT or Dtype.FLOAT, and the position of the end of the number.
///
- public (JNode num, int ii, int line_num) ParseNumber(string q, int ii, int line_num)
+ public JNode ParseNumber(string q)
{
StringBuilder sb = new StringBuilder();
// parsed tracks which portions of a number have been parsed.
- // So if the int part has been parsed, it will be "i".
- // If the int and decimal point parts have been parsed, it will be "id".
- // If the int, decimal point, and scientific notation parts have been parsed, it will be "ide"
- string parsed = "i";
+ // So if the int part has been parsed, it will be 1.
+ // If the int and decimal point parts have been parsed, it will be 3.
+ // If the int, decimal point, and scientific notation parts have been parsed, it will be 7
+ int parsed = 1;
char c = q[ii];
if (c == '-' || c == '+')
{
@@ -401,23 +416,23 @@ private static JNode TryParseDateOrDateTime(string maybe_datetime, int line_num)
}
else if (c == '.')
{
- if (parsed != "i")
+ if (parsed != 1)
{
if (lint == null) throw new RemesLexerException(ii, q, "Number with two decimal points");
lint.Add(new JsonLint("Number with two decimal points", ii, line_num, c));
break;
}
- parsed = "id";
+ parsed = 3;
sb.Append('.');
ii++;
}
else if (c == 'e' || c == 'E')
{
- if (parsed.Contains('e'))
+ if ((parsed & 4) != 0)
{
break;
}
- parsed += 'e';
+ parsed += 4;
sb.Append('e');
if (ii < q.Length - 1)
{
@@ -434,20 +449,20 @@ private static JNode TryParseDateOrDateTime(string maybe_datetime, int line_num)
// fractions are part of the JSON language specification
double numer = double.Parse(sb.ToString());
JNode denom_node;
- (denom_node, ii, line_num) = ParseNumber(q, ii + 1, line_num);
+ denom_node = ParseNumber(q);
double denom = Convert.ToDouble(denom_node.value);
- return (new JNode(numer / denom, Dtype.FLOAT, line_num), ii, line_num);
+ return new JNode(numer / denom, Dtype.FLOAT, line_num);
}
else
{
break;
}
}
- if (parsed == "i")
+ if (parsed == 1)
{
- return (new JNode(long.Parse(sb.ToString()), Dtype.INT, line_num), ii, line_num);
+ return new JNode(long.Parse(sb.ToString()), Dtype.INT, line_num);
}
- return (new JNode(double.Parse(sb.ToString()), Dtype.FLOAT, line_num), ii, line_num);
+ return new JNode(double.Parse(sb.ToString()), Dtype.FLOAT, line_num);
}
///
@@ -465,16 +480,15 @@ private static JNode TryParseDateOrDateTime(string maybe_datetime, int line_num)
/// The starting line number
/// a JArray, and the position of the end of the array.
///
- public (JArray arr, int pos, int line_num) ParseArray(string inp, int startpos, int line_num)
+ public JArray ParseArray(string inp)
{
- int start_line_num = line_num;
var children = new List();
- int ii = startpos + 1;
bool already_seen_comma = false;
- char cur_c = inp[ii];
+ int start_line_num = line_num;
+ char cur_c = inp[++ii];
while (ii < inp.Length)
{
- (ii, line_num) = ConsumeWhiteSpace(inp, ii, line_num);
+ ConsumeWhiteSpace(inp);
cur_c = inp[ii];
// tried using a switch statement instead of chained if/else if, but it's actually much slower
if (cur_c == ',')
@@ -502,7 +516,8 @@ private static JNode TryParseDateOrDateTime(string maybe_datetime, int line_num)
}
JArray arr = new JArray(start_line_num, children);
// Console.WriteLine("Returning array " + arr.ToString());
- return (arr, ii + 1, line_num);
+ ii++;
+ return arr;
}
else if (cur_c == '}')
{
@@ -514,9 +529,10 @@ private static JNode TryParseDateOrDateTime(string maybe_datetime, int line_num)
}
lint.Add(new JsonLint("Tried to terminate an array with '}'", ii, line_num, cur_c));
JArray arr = new JArray(start_line_num, children);
- return (arr, ii + 1, line_num);
+ ii++;
+ return arr;
}
- else if (allow_javascript_comments && cur_c == '/') (ii, line_num) = ConsumeComment(inp, ii, line_num);
+ else if (cur_c == '/') MaybeConsumeComment(inp);
else
{
if (children.Count > 0 && !already_seen_comma)
@@ -527,7 +543,7 @@ private static JNode TryParseDateOrDateTime(string maybe_datetime, int line_num)
// a new array member of some sort
already_seen_comma = false;
JNode new_obj;
- (new_obj, ii, line_num) = ParseSomething(inp, ii, line_num);
+ new_obj = ParseSomething(inp);
// Console.WriteLine("\nobj = "+new_obj.ToString());
children.Add(new_obj);
}
@@ -552,17 +568,16 @@ private static JNode TryParseDateOrDateTime(string maybe_datetime, int line_num)
/// The starting line number
/// a JArray, and the position of the end of the array.
///
- public (JObject obj, int pos, int line_num) ParseObject(string inp, int startpos, int line_num)
+ public JObject ParseObject(string inp)
{
int start_line_num = line_num;
var children = new Dictionary();
- int ii = startpos + 1;
bool already_seen_comma = false;
- char cur_c = inp[ii];
+ char cur_c = inp[++ii];
string child_key;
while (ii < inp.Length)
{
- (ii, line_num) = ConsumeWhiteSpace(inp, ii, line_num);
+ ConsumeWhiteSpace(inp);
cur_c = inp[ii];
// tried using a switch statement here - it turns out to be much slower
if (cur_c == ',')
@@ -590,7 +605,7 @@ private static JNode TryParseDateOrDateTime(string maybe_datetime, int line_num)
}
JObject obj = new JObject(start_line_num, children);
// Console.WriteLine("Returning array " + obj.ToString());
- return (obj, ii + 1, line_num);
+ return obj;
}
else if (cur_c == ']')
{
@@ -602,7 +617,7 @@ private static JNode TryParseDateOrDateTime(string maybe_datetime, int line_num)
}
lint.Add(new JsonLint("Tried to terminate object with ']'", ii, line_num, cur_c));
JObject obj = new JObject(start_line_num, children);
- return (obj, ii + 1, line_num);
+ return obj;
}
else if (cur_c == '"'
|| ((allow_singlequoted_str || lint != null) && cur_c == '\'')
@@ -620,8 +635,7 @@ private static JNode TryParseDateOrDateTime(string maybe_datetime, int line_num)
lint.Add(new JsonLint("Strings must be quoted with \" rather than '", ii, line_num, cur_c));
}
// a new key-value pair
- JNode keystring;
- (keystring, ii, line_num) = ParseString(inp, ii, line_num, cur_c);
+ JNode keystring = ParseString(inp, cur_c);
//child_key = (string)keystring.value;
string child_keystr = keystring.ToString();
child_key = child_keystr.Substring(1, child_keystr.Length - 2);
@@ -629,8 +643,8 @@ private static JNode TryParseDateOrDateTime(string maybe_datetime, int line_num)
{
// avoid call overhead in most likely case where colon comes
// immediately after key
- (ii, line_num) = ConsumeWhiteSpace(inp, ii, line_num);
- if (allow_javascript_comments && inp[ii] == '/') (ii, line_num) = ConsumeComment(inp, ii, line_num);
+ ConsumeWhiteSpace(inp);
+ if (cur_c == '/') MaybeConsumeComment(inp);
}
if (inp[ii] != ':')
{
@@ -638,15 +652,14 @@ private static JNode TryParseDateOrDateTime(string maybe_datetime, int line_num)
lint.Add(new JsonLint($"No ':' between key {children.Count} and value {children.Count} of object", ii, line_num, cur_c));
ii--;
}
- (ii, line_num) = ConsumeWhiteSpace(inp, ii + 1, line_num);
- if (allow_javascript_comments && inp[ii] == '/') (ii, line_num) = ConsumeComment(inp, ii, line_num);
- JNode new_obj;
- (new_obj, ii, line_num) = ParseSomething(inp, ii, line_num);
+ ConsumeWhiteSpace(inp);
+ if (inp[ii] == '/') MaybeConsumeComment(inp);
+ JNode new_obj = ParseSomething(inp);
// Console.WriteLine($"\nkey = {child_key}, obj = {new_obj.ToString()}");
children.Add(child_key, new_obj);
already_seen_comma = false;
}
- else if (allow_javascript_comments && inp[ii] == '/') (ii, line_num) = ConsumeComment(inp, ii, line_num);
+ else if (inp[ii] == '/') MaybeConsumeComment(inp);
else
{
if (lint == null) throw new JsonParserException($"Key in object (would be key {children.Count}) must be string", cur_c, ii);
@@ -669,18 +682,9 @@ private static JNode TryParseDateOrDateTime(string maybe_datetime, int line_num)
/// The starting line number
/// a JArray, and the position of the end of the array.
///
- public (JNode node, int pos, int line_num) ParseSomething(string inp, int ii, int line_num)
+ public JNode ParseSomething(string inp)
{
- // (ii, line_num) = ConsumeWhiteSpace(inp, ii, line_num);
- char cur_c = inp[ii];
- //try
- //{
- // cur_c = inp[ii]; // not doing this now for speed reasons
- //}
- //catch (IndexOutOfRangeException)
- //{
- // throw new JsonParserException("Unexpected end of JSON string", inp[inp.Length - 1], inp.Length - 1);
- //}
+ char cur_c = inp[ii]; // could throw IndexOutOfRangeException, but we'll handle that elsewhere
char next_c;
if (cur_c == '"' || ((allow_singlequoted_str || lint != null) && cur_c == '\''))
{
@@ -688,19 +692,19 @@ private static JNode TryParseDateOrDateTime(string maybe_datetime, int line_num)
{
lint.Add(new JsonLint("Strings must be quoted with \" rather than '", ii, line_num, cur_c));
}
- return ParseString(inp, ii, line_num, cur_c);
+ return ParseString(inp, cur_c);
}
if (cur_c >= '0' && cur_c <= '9')
{
- return ParseNumber(inp, ii, line_num);
+ return ParseNumber(inp);
}
if (cur_c == '[')
{
- return ParseArray(inp, ii, line_num);
+ return ParseArray(inp);
}
if (cur_c == '{')
{
- return ParseObject(inp, ii, line_num);
+ return ParseObject(inp);
}
// either a special scalar or a negative number
next_c = inp[ii + 1];
@@ -709,7 +713,7 @@ private static JNode TryParseDateOrDateTime(string maybe_datetime, int line_num)
// try negative infinity or number
if (next_c >= '0' && next_c <= '9')
{
- return ParseNumber(inp, ii, line_num);
+ return ParseNumber(inp);
}
if (next_c == 'I' && inp.Substring(ii + 2, 7) == "nfinity")
{
@@ -721,8 +725,8 @@ private static JNode TryParseDateOrDateTime(string maybe_datetime, int line_num)
{
lint.Add(new JsonLint("-Infinity is not part of the original JSON specification", ii, line_num, cur_c));
}
- return (new JNode(double.NegativeInfinity, Dtype.FLOAT, line_num),
- ii + 9, line_num);
+ ii += 9;
+ return new JNode(1d, Dtype.FLOAT, line_num); // should return -inf
}
throw new JsonParserException("Expected literal starting with '-' to be negative number",
next_c, ii+1);
@@ -737,7 +741,8 @@ private static JNode TryParseDateOrDateTime(string maybe_datetime, int line_num)
// try null
if (next_c == 'u' && inp.Substring(ii + 2, 2) == "ll")
{
- return (new JNode(null, Dtype.NULL, line_num), ii + 4, line_num);
+ ii += 4;
+ return new JNode(null, Dtype.NULL, line_num);
}
throw new JsonParserException("Expected literal starting with 'n' to be null",
next_c, ii+1);
@@ -755,7 +760,8 @@ private static JNode TryParseDateOrDateTime(string maybe_datetime, int line_num)
{
lint.Add(new JsonLint("NaN is not part of the original JSON specification", ii, line_num, cur_c));
}
- return (new JNode(double.NaN, Dtype.FLOAT, line_num), ii + 3, line_num);
+ ii += 3;
+ return new JNode(0d, Dtype.FLOAT, line_num); // should return NaN
}
throw new JsonParserException("Expected literal starting with 'N' to be NaN", next_c, ii+1);
}
@@ -772,8 +778,12 @@ private static JNode TryParseDateOrDateTime(string maybe_datetime, int line_num)
{
lint.Add(new JsonLint("Infinity is not part of the original JSON specification", ii, line_num, cur_c));
}
- return (new JNode(double.PositiveInfinity, Dtype.FLOAT, line_num),
- ii + 8, line_num);
+ ii += 8;
+ //double nan = double.NaN;
+ //double neginf = double.NegativeInfinity;
+ double inf = 1d; // double.PositiveInfinity;
+ //if (nan == inf || inf == neginf) { }
+ return new JNode(inf, Dtype.FLOAT, line_num); // should return inf
}
throw new JsonParserException("Expected literal starting with 'I' to be Infinity",
next_c, ii+1);
@@ -783,7 +793,8 @@ private static JNode TryParseDateOrDateTime(string maybe_datetime, int line_num)
// try true
if (next_c == 'r' && inp[ii + 2] == 'u' && inp[ii + 3] == 'e')
{
- return (new JNode(true, Dtype.BOOL, line_num), ii + 4, line_num);
+ ii += 4;
+ return new JNode(true, Dtype.BOOL, line_num);
}
throw new JsonParserException("Expected literal starting with 't' to be true", next_c, ii+1);
}
@@ -792,7 +803,8 @@ private static JNode TryParseDateOrDateTime(string maybe_datetime, int line_num)
// try false
if (next_c == 'a' && inp.Substring(ii + 2, 3) == "lse")
{
- return (new JNode(false, Dtype.BOOL, line_num), ii + 5, line_num);
+ ii += 5;
+ return new JNode(false, Dtype.BOOL, line_num);
}
throw new JsonParserException("Expected literal starting with 'f' to be false",
next_c, ii+1);
@@ -813,19 +825,17 @@ public JNode Parse(string inp)
{
throw new JsonParserException("no input");
}
- (int startpos, int line_num) = ConsumeWhiteSpace(inp, 0, 0);
- if (allow_javascript_comments && inp[startpos] == '/')
- {
- (startpos, line_num) = ConsumeComment(inp, startpos, line_num);
- }
- if (startpos >= inp.Length)
+ ii = 0;
+ line_num = 0;
+ ConsumeWhiteSpace(inp);
+ if (inp[ii] == '/') MaybeConsumeComment(inp);
+ if (ii >= inp.Length)
{
throw new JsonParserException("Json string is only whitespace");
}
- JNode node;
try
{
- (node, _, _) = ParseSomething(inp, startpos, line_num);
+ return ParseSomething(inp);
}
catch (Exception e)
{
@@ -835,7 +845,6 @@ public JNode Parse(string inp)
}
throw;
}
- return node;
}
}
#endregion
@@ -864,49 +873,49 @@ public static void Test()
string norm_example = "{\"a\": [-1, true, {\"b\": 0.5, \"c\": \"\\uae77\"}, null], "
+ "\"a\\u10ff\": [true, false, NaN, Infinity, -Infinity, {}, \"\\u043ea\", []], "
+ "\"back'slas\\\"h\": [\"\\\"'\\f\\n\\b\\t/\", -0.5, 23, \"\"]}";
- string pprint_example = "{" +
- NL + "\"a\":" +
- NL + " [" +
- NL + " -1," +
- NL + " true," +
- NL + " {" +
- NL + " \"b\": 0.5," +
- NL + " \"c\": \"\\uae77\"" +
- NL + " }," +
- NL + " null" +
- NL + " ]," +
- NL + "\"a\\u10ff\":" +
- NL + " [" +
- NL + " true," +
- NL + " false," +
- NL + " NaN," +
- NL + " Infinity," +
- NL + " -Infinity," +
- NL + " {" +
- NL + " }," +
- NL + " \"\\u043ea\"," +
- NL + " [" +
- NL + " ]" +
- NL + " ]," +
- NL + "\"back'slas\\\"h\":" +
- NL + " [" +
- NL + " \"\\\"'\\f\\n\\b\\t/\"," +
- NL + " -0.5," +
- NL + " 23," +
- NL + " \"\"" +
- NL + " ]" +
+ string pprint_example = "{" +
+ NL + "\"a\":" +
+ NL + " [" +
+ NL + " -1," +
+ NL + " true," +
+ NL + " {" +
+ NL + " \"b\": 0.5," +
+ NL + " \"c\": \"\\uae77\"" +
+ NL + " }," +
+ NL + " null" +
+ NL + " ]," +
+ NL + "\"a\\u10ff\":" +
+ NL + " [" +
+ NL + " true," +
+ NL + " false," +
+ NL + " NaN," +
+ NL + " Infinity," +
+ NL + " -Infinity," +
+ NL + " {" +
+ NL + " }," +
+ NL + " \"\\u043ea\"," +
+ NL + " [" +
+ NL + " ]" +
+ NL + " ]," +
+ NL + "\"back'slas\\\"h\":" +
+ NL + " [" +
+ NL + " \"\\\"'\\f\\n\\b\\t/\"," +
+ NL + " -0.5," +
+ NL + " 23," +
+ NL + " \"\"" +
+ NL + " ]" +
NL + "}";
- var testcases = new (string, string, string, string)[]
+ var testcases = new string[][]
{
- (example, norm_example, pprint_example, "general parsing"),
- ("1/2", "0.5", "0.5", "fractions"),
- ("[[]]", "[[]]", "[" + NL + " [" + NL + " ]" + NL + "]", "empty lists"),
- ("\"abc\"", "\"abc\"", "\"abc\"", "scalar string"),
- ("1", "1", "1", "scalar int"),
- ("-1.0", "-1.0", "-1.0", "negative scalar float"),
- ("3.5", "3.5", "3.5", "scalar float"),
- ("-4", "-4", "-4", "negative scalar int"),
- ("[{\"FullName\":\"C:\\\\123467756\\\\Z\",\"LastWriteTimeUtc\":\"\\/Date(1600790697130)\\/\"}," +
+ new string[]{ example, norm_example, pprint_example, "general parsing" },
+ new string[] { "1/2", "0.5", "0.5", "fractions" },
+ new string[] { "[[]]", "[[]]", "[" + NL + " [" + NL + " ]" + NL + "]", "empty lists" },
+ new string[] { "\"abc\"", "\"abc\"", "\"abc\"", "scalar string" },
+ new string[] { "1", "1", "1", "scalar int" },
+ new string[] { "-1.0", "-1.0", "-1.0", "negative scalar float" },
+ new string[] { "3.5", "3.5", "3.5", "scalar float" },
+ new string[] { "-4", "-4", "-4", "negative scalar int" },
+ new string[] { "[{\"FullName\":\"C:\\\\123467756\\\\Z\",\"LastWriteTimeUtc\":\"\\/Date(1600790697130)\\/\"}," +
"{\"FullName\":\"C:\\\\123467756\\\\Z\\\\B\",\"LastWriteTimeUtc\":\"\\/Date(1618852147285)\\/\"}]",
"[{\"FullName\": \"C:\\\\123467756\\\\Z\", \"LastWriteTimeUtc\": \"/Date(1600790697130)/\"}, " +
"{\"FullName\": \"C:\\\\123467756\\\\Z\\\\B\", \"LastWriteTimeUtc\": \"/Date(1618852147285)/\"}]",
@@ -920,12 +929,14 @@ public static void Test()
NL + " \"LastWriteTimeUtc\": \"/Date(1618852147285)/\"" +
NL + " }" +
NL + "]",
- "open issue in Kapilratnani's JSON-Viewer regarding forward slashes having '/' stripped"),
+ "open issue in Kapilratnani's JSON-Viewer regarding forward slashes having '/' stripped" },
};
int tests_failed = 0;
int ii = 0;
- foreach ((string input, string norm_input, string pprint_desired, string msg) in testcases)
+ foreach (string[] test in testcases)
{
+ //(string input, string norm_input, string pprint_desired, string msg)
+ string input = test[0], norm_input = test[1], pprint_desired = test[2], msg = test[3];
JNode json = parser.Parse(input);
string norm_str_out = json.ToString();
string pprint_out = json.PrettyPrint(4);
@@ -937,7 +948,7 @@ public static void Test()
{2}
Got
{3} ",
- ii+1, msg, norm_input, norm_str_out));
+ ii + 1, msg, norm_input, norm_str_out));
}
ii++;
if (pprint_out != pprint_desired)
@@ -948,7 +959,7 @@ public static void Test()
{2}
Got
{3} ",
- ii+1, msg, pprint_desired, pprint_out));
+ ii + 1, msg, pprint_desired, pprint_out));
}
ii++;
}
@@ -966,18 +977,20 @@ public static void Test()
{2}
instead got
{3}",
- ii+1, objstr, pp, pp_ch_line));
+ ii + 1, objstr, pp, pp_ch_line));
}
- var linekeys = new (string key, int expected_line)[]
+ var keylines = new object[][]
{
- ("a", 2),
- ("b", 8),
- ("c", 11),
- ("d", 13)
+ new object[]{"a", 2 },
+ new object[]{ "b", 8 },
+ new object[]{ "c", 11 },
+ new object[]{"d", 13 }
};
- foreach ((string key, int expected_line) in linekeys)
+ foreach (object[] kl in keylines)
{
+ string key = (string)kl[0];
+ int expected_line = (int)kl[1];
ii++;
int true_line = obj.children[key].line_num;
if (true_line != expected_line)
@@ -1000,8 +1013,9 @@ instead got
{3}",
ii+1, objstr, tostr, tostr_ch_line));
}
- foreach ((string key, _) in linekeys)
+ foreach (object[] kl in keylines)
{
+ string key = (string)kl[0];
ii++;
int true_line = obj.children[key].line_num;
if (true_line != 0)
@@ -1013,8 +1027,10 @@ instead got
// test if the parser correctly counts line numbers in nested JSON
JObject pp_obj = (JObject)parser.Parse(pp_ch_line);
- foreach ((string key, int expected_line) in linekeys)
+ foreach (object[] kl in keylines)
{
+ string key = (string)kl[0];
+ int expected_line = (int)kl[1];
int true_line = pp_obj.children[key].line_num;
if (true_line != expected_line)
{
@@ -1023,24 +1039,27 @@ instead got
}
}
- var equality_testcases = new (string astr, string bstr, bool a_equals_b)[]
+ var equality_testcases = new object[][]
{
- ("1", "2", false),
- ("1", "1", true),
- ("2.5e3", "2.5e3", true),
- ("2.5e3", "2.2e3", false),
- ("\"a\"", "\"a\"", true),
- ("\"a\"", "\"b\"", false),
- ("[[1, 2], [3, 4]]", "[[1,2],[3,4]]", true),
- ("[1, 2, 3, 4]", "[[1,2], [3,4]]", false),
- ("{\"a\": 1, \"b\": Infinity, \"c\": 0.5}", "{\"b\": Infinity, \"a\": 1, \"c\": 1/2}", true),
- ("[\"z\\\"\"]", "[\"z\\\"\"]", true),
- ("{}", "{" + NL + " }", true),
- ("[]", "[ ]", true),
- ("[]", "[1, 2]", false)
+ new object[] { "1", "2", false },
+ new object[] { "1", "1", true },
+ new object[] { "2.5e3", "2.5e3", true },
+ new object[] { "2.5e3", "2.2e3", false },
+ new object[] { "\"a\"", "\"a\"", true },
+ new object[] { "\"a\"", "\"b\"", false },
+ new object[] { "[[1, 2], [3, 4]]", "[[1,2],[3,4]]", true },
+ new object[] { "[1, 2, 3, 4]", "[[1,2], [3,4]]", false },
+ new object[] { "{\"a\": 1, \"b\": Infinity, \"c\": 0.5}", "{\"b\": Infinity, \"a\": 1, \"c\": 1/2}", true },
+ new object[] { "[\"z\\\"\"]", "[\"z\\\"\"]", true },
+ new object[] { "{}", "{" + NL + " }", true },
+ new object[] { "[]", "[ ]", true },
+ new object[] { "[]", "[1, 2]", false }
};
- foreach ((string astr, string bstr, bool a_equals_b) in equality_testcases)
+ foreach (object[] test in equality_testcases)
{
+ string astr = (string)test[0];
+ string bstr = (string)test[1];
+ bool a_equals_b = (bool)test[2];
ii++;
JNode a = parser.Parse(astr);
JNode b = parser.Parse(bstr);
@@ -1061,49 +1080,51 @@ public static void TestSpecialParserSettings()
{
JsonParser simpleparser = new JsonParser();
JsonParser parser = new JsonParser(true, true, true);
- var testcases = new (string inp, JNode desired_out)[]
+ var testcases = new object[][]
{
- ("{\"a\": 1, // this is a comment\n\"b\": 2}", simpleparser.Parse("{\"a\": 1, \"b\": 2}")),
- (@"[1,
+ new object[]{ "{\"a\": 1, // this is a comment\n\"b\": 2}", simpleparser.Parse("{\"a\": 1, \"b\": 2}") },
+ new object[]{ @"[1,
/* this is a
multiline comment
*/
2]",
simpleparser.Parse("[1, 2]")
- ),
- ("\"2022-06-04\"", new JNode(new DateTime(2022, 6, 4), Dtype.DATE, 0)),
- ("\"1956-11-13 11:17:56.123\"", new JNode(new DateTime(1956, 11, 13, 11, 17, 56, 123), Dtype.DATETIME, 0)),
- ("\"1956-13-12\"", new JNode("1956-13-12", Dtype.STR, 0)), // bad date- month too high
- ("\"1956-11-13 25:56:17\"", new JNode("1956-11-13 25:56:17", Dtype.STR, 0)), // bad datetime- hour too high
- ("\"1956-11-13 \"", new JNode("1956-11-13 ", Dtype.STR, 0)), // bad date- has space at end
- ("['abc', 2, '1999-01-03']", // single-quoted strings
- new JArray(0, new List(new JNode[]{new JNode("abc", Dtype.STR, 0),
+ },
+ new object[]{ "\"2022-06-04\"", new JNode(new DateTime(2022, 6, 4), Dtype.DATE, 0) },
+ new object[]{ "\"1956-11-13 11:17:56.123\"", new JNode(new DateTime(1956, 11, 13, 11, 17, 56, 123), Dtype.DATETIME, 0) },
+ new object[]{ "\"1956-13-12\"", new JNode("1956-13-12", Dtype.STR, 0) }, // bad date- month too high
+ new object[]{ "\"1956-11-13 25:56:17\"", new JNode("1956-11-13 25:56:17", Dtype.STR, 0) }, // bad datetime- hour too high
+ new object[]{ "\"1956-11-13 \"", new JNode("1956-11-13 ", Dtype.STR, 0) }, // bad date- has space at end
+ new object[]{ "['abc', 2, '1999-01-03']", // single-quoted strings
+ new JArray(0, new List(new JNode[]{new JNode("abc", Dtype.STR, 0),
new JNode(Convert.ToInt64(2), Dtype.INT, 0),
- new JNode(new DateTime(1999, 1, 3), Dtype.DATE, 0)}))),
- ("{'a': \"1\", \"b\": 2}", // single quotes and double quotes in same thing
- simpleparser.Parse("{\"a\": \"1\", \"b\": 2}")),
- (@"{'a':
+ new JNode(new DateTime(1999, 1, 3), Dtype.DATE, 0)}))},
+ new object[]{ "{'a': \"1\", \"b\": 2}", // single quotes and double quotes in same thing
+ simpleparser.Parse("{\"a\": \"1\", \"b\": 2}") },
+ new object[]{ @"{'a':
// one comment
// wow, another single-line comment?
// go figure
- [2]}",
- simpleparser.Parse("{\"a\": [2]}")),
- ("{'a': [ /* internal comment */ 2 ]}", simpleparser.Parse("{\"a\": [2]}")),
- ("[1, 2] // trailing comment", simpleparser.Parse("[1, 2]")),
- ("// the comments return!\n[2]", simpleparser.Parse("[2]")),
- (@"
+ [2]}",
+ simpleparser.Parse("{\"a\": [2]}")},
+ new object[]{ "{'a': [ /* internal comment */ 2 ]}", simpleparser.Parse("{\"a\": [2]}") },
+ new object[]{ "[1, 2] // trailing comment", simpleparser.Parse("[1, 2]") },
+ new object[]{ "// the comments return!\n[2]", simpleparser.Parse("[2]") },
+ new object[]{ @"
/* multiline comment
*/
/* followed by another multiline comment */
// followed by a single line comment
/* and then a multiline comment */
[1, 2]
- /* and one last multiline comment */", simpleparser.Parse("[1, 2]"))
+ /* and one last multiline comment */", simpleparser.Parse("[1, 2]") }
};
int tests_failed = 0;
int ii = 0;
- foreach ((string inp, JNode desired_out) in testcases)
+ foreach (object[] test in testcases)
{
+ string inp = (string)test[0];
+ JNode desired_out = (JNode)test[1];
ii++;
JNode result = new JNode(null, Dtype.NULL, 0);
string base_message = $"Expected JsonParser(true, true, true, true).Parse({inp})\nto return\n{desired_out.ToString()}\n";
@@ -1118,7 +1139,7 @@ multiline comment
Console.WriteLine($"{base_message}Instead returned\n{result.ToString()}");
}
}
- catch (Exception ex)
+ catch
{
tests_failed++;
Console.WriteLine($"{base_message}Instead returned\n{result.ToString()}");
@@ -1139,32 +1160,34 @@ public static void TestLinter()
{
JsonParser simpleparser = new JsonParser();
JsonParser parser = new JsonParser(true, true, true, true);
- var testcases = new (string inp, string desired_out, string[] expected_lint)[]
+ var testcases = new object[][]
{
- ("[1, 2]", "[1, 2]", new string[]{ }), // syntactically valid JSON
- ("[1 2]", "[1, 2]", new string[]{"No comma between array members" }),
- ("[1, , 2]", "[1, 2]", new string[]{$"Two consecutive commas after element 0 of array"}),
- ("[1, 2,]", "[1, 2]", new string[]{"Comma after last element of array"}),
- ("[1 2,]", "[1, 2]", new string[]{"No comma between array members", "Comma after last element of array"}),
- ("{\"a\" 1}", "{\"a\": 1}", new string[]{"No ':' between key 0 and value 0 of object"}),
- ("{\"a\": 1 \"b\": 2}", "{\"a\": 1, \"b\": 2}", new string[]{ "No comma after key-value pair 0 in object" }),
- ("[1 \"a\n\"]", "[1, \"a\\n\"]", new string[]{"No comma between array members", "String literal starting at position 4 contains newline"}),
- ("[NaN, -Infinity, Infinity]", "[NaN, -Infinity, Infinity]",
+ new object[]{ "[1, 2]", "[1, 2]", new string[]{ } }, // syntactically valid JSON
+ new object[]{ "[1 2]", "[1, 2]", new string[]{"No comma between array members" } },
+ new object[]{ "[1, , 2]", "[1, 2]", new string[]{$"Two consecutive commas after element 0 of array"} },
+ new object[]{ "[1, 2,]", "[1, 2]", new string[]{"Comma after last element of array"} },
+ new object[]{ "[1 2,]", "[1, 2]", new string[]{"No comma between array members", "Comma after last element of array"} },
+ new object[]{ "{\"a\" 1}", "{\"a\": 1}", new string[]{"No ':' between key 0 and value 0 of object"} },
+ new object[]{ "{\"a\": 1 \"b\": 2}", "{\"a\": 1, \"b\": 2}", new string[]{ "No comma after key-value pair 0 in object" } },
+ new object[]{ "[1 \"a\n\"]", "[1, \"a\\n\"]", new string[]{"No comma between array members", "String literal starting at position 4 contains newline"} },
+ new object[]{ "[NaN, -Infinity, Infinity]", "[NaN, -Infinity, Infinity]",
new string[]{ "NaN is not part of the original JSON specification",
"-Infinity is not part of the original JSON specification",
- "Infinity is not part of the original JSON specification" }),
- ("{'a\n':[1,2,},]", "{\"a\\n\": [1,2]}", new string[]{"Strings must be quoted with \" rather than '",
+ "Infinity is not part of the original JSON specification" } },
+ new object[]{ "{'a\n':[1,2,},]", "{\"a\\n\": [1,2]}", new string[]{"Strings must be quoted with \" rather than '",
"String literal starting at position 1 contains newline",
- "Comma after last element of array",
+ "Comma after last element of array",
"Tried to terminate an array with '}'",
"Comma after last key-value pair of object",
- "Tried to terminate object with ']'"}),
+ "Tried to terminate object with ']'"} },
};
int tests_failed = 0;
int ii = 0;
- foreach ((string inp, string desired_out, string[] expected_lint) in testcases)
+ foreach (object[] test in testcases)
{
+ string inp = (string)test[0], desired_out = (string)test[1];
+ string[] expected_lint = (string[])test[2];
ii++;
JNode jdesired = simpleparser.Parse(desired_out);
JNode result = new JNode(null, Dtype.NULL, 0);
@@ -1196,7 +1219,7 @@ public static void TestLinter()
Console.WriteLine($"{base_message}Instead returned\n{result.ToString()} and had lint {lint_str}");
}
}
- catch (Exception ex)
+ catch
{
tests_failed++;
Console.WriteLine($"{base_message}Instead returned\n{result.ToString()} and had lint {lint_str}");
diff --git a/JsonToolsNppPlugin/JSONTools/JsonSchema.cs b/JsonToolsNppPlugin/JSONTools/JsonSchema.cs
index 8cfb95c..a57066c 100644
--- a/JsonToolsNppPlugin/JSONTools/JsonSchema.cs
+++ b/JsonToolsNppPlugin/JSONTools/JsonSchema.cs
@@ -2,7 +2,7 @@
using System.Collections.Generic;
using System.Linq;
-namespace JSON_Viewer.JSONViewer
+namespace JSON_Tools.JSON_Tools
{
///
/// This class generates a minimalist JSON schema for a JSON object.
@@ -256,11 +256,12 @@ public Dictionary BuildSchema(JNode obj)
{
JObject oobj = (JObject)obj;
var props = new Dictionary();
- foreach ((string k, JNode v) in oobj.children)
+ foreach (string k in oobj.children.Keys)
{
- props[k] = BuildSchema(v);
+ props[k] = BuildSchema(oobj.children[k]);
}
- schema.TryAdd("required", new HashSet(oobj.children.Keys));
+ if (!schema.ContainsKey("required"))
+ schema["required"] = new HashSet(oobj.children.Keys);
schema["properties"] = props;
}
return schema;
@@ -296,9 +297,9 @@ public JNode SchemaToJNode(object schema)
if (schema is Dictionary)
{
var kids = new Dictionary();
- foreach ((string k, object v) in (Dictionary)schema)
+ foreach (KeyValuePair kv in (Dictionary)schema)
{
- kids[k] = SchemaToJNode(v);
+ kids[kv.Key] = SchemaToJNode(kv.Value);
}
return new JObject(0, kids);
}
@@ -330,7 +331,8 @@ public JNode SchemaToJNode(object schema)
public JNode GetSchema(JNode obj)
{
var schema = new Dictionary(BASE_SCHEMA);
- foreach ((string k, object v) in BuildSchema(obj)) { schema[k] = v; }
+ foreach (KeyValuePair kv in BuildSchema(obj))
+ schema[kv.Key] = kv.Value;
return SchemaToJNode(schema);
}
}
@@ -341,12 +343,12 @@ public static void Test()
{
JsonParser jsonParser = new JsonParser();
JsonSchemaMaker sch_maker = new JsonSchemaMaker();
- (string inp, string desired_out)[] testcases = new (string inp, string desired_out)[]
+ string[][] testcases = new string[][]
{
- ("[1, \"1\"]", "{\"type\": \"array\", \"items\": {\"type\": [\"integer\", \"string\"]}}"),
- ("{\"a\": 1}", "{\"type\": \"object\", \"properties\": {\"a\": {\"type\": \"integer\"}}, \"required\": [\"a\"]}"),
- ("[{\"a\": 1, \"b\": \"w\", \"c\": 1.0}, " +
- "{\"a\": \"2\", \"b\": \"v\"}]",
+ new string[]{ "[1, \"1\"]", "{\"type\": \"array\", \"items\": {\"type\": [\"integer\", \"string\"]}}" },
+ new string[]{ "{\"a\": 1}", "{\"type\": \"object\", \"properties\": {\"a\": {\"type\": \"integer\"}}, \"required\": [\"a\"]}" },
+ new string[]{ "[{\"a\": 1, \"b\": \"w\", \"c\": 1.0}, " +
+ "{\"a\": \"2\", \"b\": \"v\"}]",
"{\"type\": \"array\", " +
"\"items\": " +
"{\"type\": \"object\", " +
@@ -357,8 +359,9 @@ public static void Test()
"}," +
"\"required\": [\"a\", \"b\"]" +
"}" +
- "}"),
- ("[[1, 2.0, {\"a\": 1}]]",
+ "}"
+ },
+ new string[]{"[[1, 2.0, {\"a\": 1}]]",
"{" +
"\"type\": \"array\"," +
"\"items\": " +
@@ -379,8 +382,9 @@ public static void Test()
"]" +
"}" +
"}" +
- "}"),
- ("[" +
+ "}"
+ },
+ new string[]{"[" +
"{" +
"\"a\": 3" +
"}," +
@@ -462,19 +466,22 @@ public static void Test()
"\"a\"" +
"]" +
"}" +
- "}"), // nested JSON object schema
+ "}"
+ }, // nested JSON object schema
};
int ii = 0;
int tests_failed = 0;
JObject base_schema_j = (JObject)sch_maker.SchemaToJNode(JsonSchemaMaker.BASE_SCHEMA);
- foreach ((string inp, string desired_out) in testcases)
+ foreach (string[] test in testcases)
{
+ string inp = test[0];
+ string desired_out = test[1];
ii++;
JNode jinp = jsonParser.Parse(inp);
JObject desired_schema = (JObject)jsonParser.Parse(desired_out);
- foreach ((string k, JNode v) in base_schema_j.children)
+ foreach (string k in base_schema_j.children.Keys)
{
- desired_schema.children[k] = v;
+ desired_schema.children[k] = base_schema_j.children[k];
}
string desired_sch_str = desired_schema.ToString();
JNode schema = new JNode(null, Dtype.NULL, 0);
@@ -489,7 +496,7 @@ public static void Test()
Console.WriteLine($"Expected the schema for {inp} to be\n{desired_sch_str}\nInstead got\n{schema.ToString()}");
}
}
- catch (Exception e)
+ catch
{
// probably because of something like trying to compare an array to a non-array
tests_failed++;
diff --git a/JsonToolsNppPlugin/JSONTools/JsonTabularize.cs b/JsonToolsNppPlugin/JSONTools/JsonTabularize.cs
index ca62f09..f5b1589 100644
--- a/JsonToolsNppPlugin/JSONTools/JsonTabularize.cs
+++ b/JsonToolsNppPlugin/JSONTools/JsonTabularize.cs
@@ -9,7 +9,7 @@ Uses an algorithm to flatten nested JSON.
using System.Linq;
using System.Text;
-namespace JSON_Viewer.JSONViewer
+namespace JSON_Tools.JSON_Tools
{
public enum JsonTabularizerStrategy
{
@@ -338,7 +338,7 @@ private void FindTabsInSchemaHelper(Dictionary schema, List)schema["items"];
List
public class RemesParser
{
+ public static RemesPathLexer lexer = new RemesPathLexer();
+ ///
+ /// A LRU cache mapping queries to compiled results that the parser can check against
+ /// to save time on parsing.
+ /// May not be used if parsing is really fast and so caching is unnecessary
+ ///
public QueryCache cache;
///
@@ -210,10 +241,10 @@ public JNode Compile(string query)
{
//// turns out compiling queries is very fast (tens of microseconds for a simple query),
//// so caching old queries doesn't save much time
- // JNode? old_result = cache.Check(query);
+ // JNode old_result = cache.Check(query);
// if (old_result != null) { return old_result; }
- List toks = RemesPathLexer.Tokenize(query);
- (JNode result, _) = ParseExprOrScalarFunc(toks, 0);
+ List toks = lexer.Tokenize(query);
+ JNode result = (JNode)ParseExprOrScalarFunc(toks, 0).obj;
//cache.Add(query, result);
return result;
}
@@ -239,21 +270,22 @@ public JNode Search(string query, JNode obj)
public static string INDEXER_STARTERS = ".[{";
#region INDEXER_FUNCTIONS
- private Func> ApplyMultiIndex(object inds, bool is_varname_list, bool is_recursive = false)
+ private Func> ApplyMultiIndex(object inds, bool is_varname_list, bool is_recursive = false)
{
if (inds is CurJson)
{
- IEnumerable<(object, JNode)> multi_idx_func(JNode x)
+ IEnumerable multi_idx_func(JNode x)
{
return ApplyMultiIndex(((CurJson)inds).function(x), is_varname_list, is_recursive)(x);
}
+ return multi_idx_func;
}
var children = (List)inds;
if (is_varname_list)
{
if (is_recursive)
{
- IEnumerable<(object, JNode)> multi_idx_func(JNode x, string path, HashSet paths_visited)
+ IEnumerable multi_idx_func(JNode x, string path, HashSet paths_visited)
{
if (x is JArray)
{
@@ -262,9 +294,9 @@ public JNode Search(string query, JNode obj)
JArray xarr = (JArray)x;
for (int ii = 0; ii < xarr.Length; ii++)
{
- foreach ((object k, JNode v) in multi_idx_func(xarr.children[ii], path + ',' + ii.ToString(), paths_visited))
+ foreach (Key_Node kv in multi_idx_func(xarr.children[ii], path + ',' + ii.ToString(), paths_visited))
{
- yield return (k, v);
+ yield return kv;
}
}
}
@@ -279,20 +311,19 @@ public JNode Search(string query, JNode obj)
{
string strv = (string)v;
if (path == "") path = strv;
- foreach ((string k, JNode val) in xobj.children)
+ foreach (string k in xobj.children.Keys)
{
+ JNode val = xobj.children[k];
string newpath = path + ',' + new JNode(k, Dtype.STR, 0).ToString();
if (k == strv)
{
- if (!paths_visited.Contains(newpath)) yield return (0, val);
+ if (!paths_visited.Contains(newpath)) yield return new Key_Node(0, val);
paths_visited.Add(newpath);
}
else
{
- foreach ((_, JNode subval) in multi_idx_func(val, newpath, paths_visited))
- {
- yield return (0, subval);
- }
+ foreach (Key_Node ono in multi_idx_func(val, newpath, paths_visited))
+ yield return ono;
}
}
}
@@ -300,19 +331,20 @@ public JNode Search(string query, JNode obj)
{
Regex regv = (Regex)v;
if (path == "") path = regv.ToString();
- foreach ((string k, JNode val) in xobj.children)
+ foreach (string k in xobj.children.Keys)
{
+ JNode val = xobj.children[k];
string newpath = path + ',' + new JNode(k, Dtype.STR, 0).ToString();
if (regv.IsMatch(k))
{
- if (!paths_visited.Contains(newpath)) yield return (0, val);
+ if (!paths_visited.Contains(newpath)) yield return new Key_Node(0, val);
paths_visited.Add(newpath);
}
else
{
- foreach ((_, JNode subval) in multi_idx_func(val, newpath, paths_visited))
+ foreach (Key_Node ono in multi_idx_func(val, newpath, paths_visited))
{
- yield return (0, subval);
+ yield return new Key_Node(0, ono.node);
}
}
}
@@ -324,7 +356,7 @@ public JNode Search(string query, JNode obj)
}
else
{
- IEnumerable<(object, JNode)> multi_idx_func(JNode x)
+ IEnumerable multi_idx_func(JNode x)
{
var xobj = (JObject)x;
foreach (object v in children)
@@ -334,14 +366,14 @@ public JNode Search(string query, JNode obj)
string vstr = (string)v;
if (xobj.children.TryGetValue(vstr, out JNode val))
{
- yield return (vstr, val);
+ yield return new Key_Node(vstr, val);
}
}
else
{
- foreach ((string vstr, JNode val) in ApplyRegexIndex(xobj, (Regex)v))
+ foreach (Key_Node ono in ApplyRegexIndex(xobj, (Regex)v))
{
- yield return (vstr, val);
+ yield return ono;
}
}
}
@@ -357,7 +389,7 @@ public JNode Search(string query, JNode obj)
// decide whether to implement recursive search for slices and indices
throw new NotImplementedException("Recursive search for array indices and slices is not implemented");
}
- IEnumerable<(object, JNode)> multi_idx_func(JNode x)
+ IEnumerable multi_idx_func(JNode x)
{
JArray xarr = (JArray)x;
foreach (object ind in children)
@@ -367,7 +399,7 @@ public JNode Search(string query, JNode obj)
// it's a slice, so yield all the JNodes in that slice
foreach (JNode subind in xarr.children.LazySlice((int?[])ind))
{
- yield return (0, subind);
+ yield return new Key_Node(0, subind);
}
}
else
@@ -375,7 +407,7 @@ public JNode Search(string query, JNode obj)
int ii = Convert.ToInt32(ind);
if (ii >= xarr.Length) { continue; }
// allow negative indices for consistency with how slicers work
- yield return (0, xarr.children[ii >= 0 ? ii : ii + xarr.Length]);
+ yield return new Key_Node(0, xarr.children[ii >= 0 ? ii : ii + xarr.Length]);
}
}
}
@@ -383,20 +415,21 @@ public JNode Search(string query, JNode obj)
}
}
- private IEnumerable<(string key, JNode val)> ApplyRegexIndex(JObject obj, Regex regex)
+ private IEnumerable ApplyRegexIndex(JObject obj, Regex regex)
{
- foreach ((string ok, JNode val) in obj.children)
+ foreach (string ok in obj.children.Keys)
{
+ JNode val = obj.children[ok];
if (regex.IsMatch(ok))
{
- yield return (ok, val);
+ yield return new Key_Node(ok, val);
}
}
}
- private Func> ApplyBooleanIndex(JNode inds)
+ private Func> ApplyBooleanIndex(JNode inds)
{
- IEnumerable<(object key, JNode node)> bool_idxr_func(JNode x)
+ IEnumerable bool_idxr_func(JNode x)
{
JNode newinds = inds;
if (inds is CurJson)
@@ -412,9 +445,10 @@ public JNode Search(string query, JNode obj)
{
if (x.type == Dtype.OBJ)
{
- foreach ((string key, JNode val) in ((JObject)x).children)
+ JObject xobj = (JObject)x;
+ foreach (string key in xobj.children.Keys)
{
- yield return (key, val);
+ yield return new Key_Node(key, xobj.children[key]);
}
}
else if (x.type == Dtype.ARR)
@@ -422,7 +456,7 @@ public JNode Search(string query, JNode obj)
JArray xarr = (JArray)x;
for (int ii = 0; ii < xarr.Length; ii++)
{
- yield return (ii, xarr.children[ii]);
+ yield return new Key_Node(ii, xarr.children[ii]);
}
}
}
@@ -437,8 +471,9 @@ public JNode Search(string query, JNode obj)
{
throw new VectorizedArithmeticException($"bool index length {iobj.Length} does not match object/array length {xobj.Length}.");
}
- foreach ((string key, JNode xval) in xobj.children)
+ foreach (string key in xobj.children.Keys)
{
+ JNode xval = xobj.children[key];
bool i_has_key = iobj.children.TryGetValue(key, out JNode ival);
if (i_has_key)
{
@@ -448,7 +483,7 @@ public JNode Search(string query, JNode obj)
}
if ((bool)ival.value)
{
- yield return (key, xval);
+ yield return new Key_Node(key, xval);
}
}
}
@@ -472,7 +507,7 @@ public JNode Search(string query, JNode obj)
}
if ((bool)ival.value)
{
- yield return (ii, xval);
+ yield return new Key_Node(ii, xval);
}
}
yield break;
@@ -481,42 +516,44 @@ public JNode Search(string query, JNode obj)
return bool_idxr_func;
}
- private IEnumerable<(object key, JNode val)> ApplyStarIndexer(JNode x)
+ private IEnumerable ApplyStarIndexer(JNode x)
{
if (x.type == Dtype.OBJ)
{
var xobj = (JObject)x;
- foreach ((string key, JNode val) in xobj.children)
+ foreach (string key in xobj.children.Keys)
{
- yield return (key, val);
+ yield return new Key_Node(key, xobj.children[key]);
}
yield break;
}
var xarr = (JArray)x;
for (int ii = 0; ii < xarr.Length; ii++)
{
- yield return (ii, xarr.children[ii]);
+ yield return new Key_Node(ii, xarr.children[ii]);
}
}
///
- /// return null if x is not an object or array
- /// If it is an object or array, return true if its length is 0.
+ /// return 2 if x is not an object or array
+ /// If it is an object or array:
+ /// return 1 if its length is 0.
+ /// else return 0.
///
///
///
- private static bool? ObjectOrArrayEmpty(JNode x)
+ private static int ObjectOrArrayEmpty(JNode x)
{
- if (x.type == Dtype.OBJ) { return ((JObject)x).Length == 0; }
- if (x.type == Dtype.ARR) { return ((JArray)x).Length == 0; }
- return null;
+ if (x.type == Dtype.OBJ) { return (((JObject)x).Length == 0) ? 1 : 0; }
+ if (x.type == Dtype.ARR) { return (((JArray)x).Length == 0) ? 1 : 0; }
+ return 2;
}
private Func ApplyIndexerList(List indexers)
{
- JNode idxr_list_func(JNode obj, List indexers)
+ JNode idxr_list_func(JNode obj, List idxrs)
{
- IndexerFunc ix = indexers[0];
+ IndexerFunc ix = idxrs[0];
var inds = ix.idxr(obj).GetEnumerator();
// IEnumerator.MoveNext returns a bool indicating if the enumerator has passed the end of the collection
if (!inds.MoveNext())
@@ -528,7 +565,12 @@ JNode idxr_list_func(JNode obj, List indexers)
}
return new JArray(0, new List());
}
- (object k1, JNode v1) = inds.Current;
+ Key_Node k1v1 = inds.Current;
+ object k1 = k1v1.obj;
+ JNode v1 = k1v1.node;
+ Key_Node kv;
+ object k;
+ JNode v;
bool is_dict = (ix.is_dict || k1 is string) && !ix.is_recursive;
var arr = new List();
var dic = new Dictionary();
@@ -545,8 +587,8 @@ JNode idxr_list_func(JNode obj, List indexers)
dic[(string)k1] = v1;
while (inds.MoveNext())
{
- (object k, JNode v) = inds.Current;
- dic[(string)k] = v;
+ kv = inds.Current;
+ dic[(string)kv.obj] = kv.node;
}
return new JObject(0, dic);
}
@@ -554,12 +596,13 @@ JNode idxr_list_func(JNode obj, List indexers)
arr.Add(v1);
while (inds.MoveNext())
{
- (_, JNode v) = inds.Current;
- arr.Add(v);
+ arr.Add(inds.Current.node);
}
return new JArray(0, arr);
}
- var remaining_idxrs = indexers.TakeLast(indexers.Count - 1).ToList();
+ var remaining_idxrs = new List();
+ for (int ii = 1; ii < indexers.Count; ii++)
+ remaining_idxrs.Add(indexers[ii]);
if (ix.is_projection)
{
if (is_dict)
@@ -568,8 +611,8 @@ JNode idxr_list_func(JNode obj, List indexers)
dic[(string)k1] = v1;
while (inds.MoveNext())
{
- (object k, JNode v) = inds.Current;
- dic[(string)k] = v;
+ kv = inds.Current;
+ dic[(string)kv.obj] = kv.node;
}
// recursively search this projection using the remaining indexers
return idxr_list_func(new JObject(0, dic), remaining_idxrs);
@@ -578,8 +621,7 @@ JNode idxr_list_func(JNode obj, List indexers)
arr.Add(v1);
while (inds.MoveNext())
{
- (_, JNode v) = inds.Current;
- arr.Add(v);
+ arr.Add(inds.Current.node);
}
return idxr_list_func(new JArray(0, arr), remaining_idxrs);
}
@@ -588,20 +630,21 @@ JNode idxr_list_func(JNode obj, List indexers)
{
return v1_subdex;
}
- bool? is_empty = ObjectOrArrayEmpty(v1_subdex);
+ int is_empty = ObjectOrArrayEmpty(v1_subdex);
if (is_dict)
{
dic = new Dictionary();
- if (!is_empty.HasValue || !is_empty.Value)
+ if (is_empty != 1)
{
dic[(string)k1] = v1_subdex;
}
while (inds.MoveNext())
{
- (object k, JNode v) = inds.Current;
+ k = inds.Current.obj;
+ v = inds.Current.node;
JNode subdex = idxr_list_func(v, remaining_idxrs);
is_empty = ObjectOrArrayEmpty(subdex);
- if (!is_empty.HasValue || !is_empty.Value)
+ if (is_empty != 1)
{
dic[(string)k] = subdex;
}
@@ -610,16 +653,16 @@ JNode idxr_list_func(JNode obj, List indexers)
}
// obj is a list iterator
arr = new List();
- if (!is_empty.HasValue || !is_empty.Value)
+ if (is_empty != 1)
{
arr.Add(v1_subdex);
}
while (inds.MoveNext())
{
- (_, JNode v) = inds.Current;
+ v = inds.Current.node;
JNode subdex = idxr_list_func(v, remaining_idxrs);
is_empty = ObjectOrArrayEmpty(subdex);
- if (!is_empty.HasValue || !is_empty.Value)
+ if (is_empty != 1)
{
arr.Add(subdex);
}
@@ -633,15 +676,15 @@ JNode idxr_list_func(JNode obj, List indexers)
#region BINOP_FUNCTIONS
private JNode BinopTwoJsons(Binop b, JNode left, JNode right)
{
- if (ObjectOrArrayEmpty(right) == null)
+ if (ObjectOrArrayEmpty(right) == 2)
{
- if (ObjectOrArrayEmpty(left) == null)
+ if (ObjectOrArrayEmpty(left) == 2)
{
return b.Call(left, right);
}
return BinopJsonScalar(b, left, right);
}
- if (ObjectOrArrayEmpty(left) == null)
+ if (ObjectOrArrayEmpty(left) == 2)
{
return BinopScalarJson(b, left, right);
}
@@ -654,8 +697,9 @@ private JNode BinopTwoJsons(Binop b, JNode left, JNode right)
{
throw new VectorizedArithmeticException("Tried to apply a binop to two dicts with different sets of keys");
}
- foreach ((string key, JNode right_val) in robj.children)
+ foreach (string key in robj.children.Keys)
{
+ JNode right_val = robj.children[key];
bool left_has_key = lobj.children.TryGetValue(key, out JNode left_val);
if (!left_has_key)
{
@@ -685,9 +729,9 @@ private JNode BinopJsonScalar(Binop b, JNode left, JNode right)
{
var dic = new Dictionary();
var lobj = (JObject)left;
- foreach ((string key, JNode left_val) in lobj.children)
+ foreach (string key in lobj.children.Keys)
{
- dic[key] = b.Call(left_val, right);
+ dic[key] = b.Call(lobj.children[key], right);
}
return new JObject(0, dic);
}
@@ -706,9 +750,9 @@ private JNode BinopScalarJson(Binop b, JNode left, JNode right)
{
var dic = new Dictionary();
var robj = (JObject)right;
- foreach ((string key, JNode right_val) in robj.children)
+ foreach (string key in robj.children.Keys)
{
- dic[key] = b.Call(left, right_val);
+ dic[key] = b.Call(left, robj.children[key]);
}
return new JObject(0, dic);
}
@@ -952,9 +996,9 @@ JNode arg_outfunc(JNode inp)
{
var dic = new Dictionary();
var otbl = (JObject)itbl;
- foreach ((string key, JNode val) in otbl.children)
+ foreach (string key in otbl.children.Keys)
{
- all_args[0] = val;
+ all_args[0] = otbl.children[key];
dic[key] = func.function.Call(all_args);
}
return new JObject(0, dic);
@@ -994,9 +1038,9 @@ JNode arg_outfunc(JNode inp)
{
var dic = new Dictionary();
var otbl = (JObject)itbl;
- foreach ((string key, JNode val) in otbl.children)
+ foreach (string key in otbl.children.Keys)
{
- all_args[0] = val;
+ all_args[0] = otbl.children[key];
dic[key] = func.function.Call(all_args);
}
return new JObject(0, dic);
@@ -1033,9 +1077,9 @@ JNode arg_outfunc(JNode inp)
JNode other_arg = other_args[ii];
all_args[ii + 1] = other_arg is CurJson ? ((CurJson)other_arg).function(inp) : other_arg;
}
- foreach ((string key, JNode val) in xobj.children)
+ foreach (string key in xobj.children.Keys)
{
- all_args[0] = val;
+ all_args[0] = xobj.children[key];
dic[key] = func.function.Call(all_args);
}
return new JObject(0, dic);
@@ -1091,9 +1135,9 @@ JNode arg_outfunc(JNode inp)
{
var xobj = (JObject)x;
var dic = new Dictionary();
- foreach ((string key, JNode val) in xobj.children)
+ foreach (string key in xobj.children.Keys)
{
- all_args[0] = val;
+ all_args[0] = xobj.children[key];
dic[key] = func.function.Call(all_args);
}
return new JObject(0, dic);
@@ -1179,28 +1223,28 @@ JNode arg_outfunc(JNode inp)
#endregion
#region PARSER_FUNCTIONS
- private static object? PeekNextToken(List toks, int ii)
+ private static object PeekNextToken(List toks, int pos)
{
- if (ii + 1 >= toks.Count) { return null; }
- return toks[ii + 1];
+ if (pos + 1 >= toks.Count) { return null; }
+ return toks[pos + 1];
}
- private (JSlicer, int ii) ParseSlicer(List toks, int ii, int? first_num)
+ private Obj_Pos ParseSlicer(List toks, int pos, int? first_num)
{
var slicer = new int?[3];
int slots_filled = 0;
int? last_num = first_num;
- while (ii < toks.Count)
+ while (pos < toks.Count)
{
- object t = toks[ii];
+ object t = toks[pos];
if (t is char)
{
char tval = (char)t;
if (tval == ':')
{
- slicer[slots_filled++] = last_num;
+ slicer[slots_filled++] = (int)last_num;
last_num = null;
- ii++;
+ pos++;
continue;
}
else if (EXPR_FUNC_ENDERS.Contains(tval))
@@ -1210,15 +1254,16 @@ JNode arg_outfunc(JNode inp)
}
try
{
- JNode numtok;
- (numtok, ii) = ParseExprOrScalarFunc(toks, ii);
+ Obj_Pos npo = ParseExprOrScalarFunc(toks, pos);
+ JNode numtok = (JNode)npo.obj;
+ pos = npo.pos;
if (numtok.type != Dtype.INT)
{
throw new ArgumentException();
}
last_num = Convert.ToInt32(numtok.value);
}
- catch (Exception e)
+ catch (Exception)
{
throw new RemesPathException("Found non-integer while parsing a slicer");
}
@@ -1229,7 +1274,7 @@ JNode arg_outfunc(JNode inp)
}
slicer[slots_filled++] = last_num;
slicer = slicer.Take(slots_filled).ToArray();
- return (new JSlicer(slicer), ii);
+ return new Obj_Pos(new JSlicer(slicer), pos);
}
private static object GetSingleIndexerListValue(JNode ind)
@@ -1244,10 +1289,10 @@ private static object GetSingleIndexerListValue(JNode ind)
}
}
- private (Indexer indexer, int ii) ParseIndexer(List toks, int ii)
+ private Obj_Pos ParseIndexer(List toks, int pos)
{
- object t = toks[ii];
- object? nt;
+ object t = toks[pos];
+ object nt;
if (!(t is char))
{
throw new RemesPathException("Expected delimiter at the start of indexer");
@@ -1256,13 +1301,13 @@ private static object GetSingleIndexerListValue(JNode ind)
List children = new List();
if (d == '.')
{
- nt = PeekNextToken(toks, ii);
+ nt = PeekNextToken(toks, pos);
if (nt != null)
{
if (nt is Binop && ((Binop)nt).name == "*")
{
// it's a '*' indexer, which means select all keys/indices
- return (new StarIndexer(), ii + 2);
+ return new Obj_Pos(new StarIndexer(), pos + 2);
}
JNode jnt = (JNode)nt;
if ((jnt.type & Dtype.STR_OR_REGEX) == 0)
@@ -1278,35 +1323,35 @@ private static object GetSingleIndexerListValue(JNode ind)
{
children.Add(jnt.value);
}
- return (new VarnameList(children), ii + 2);
+ return new Obj_Pos(new VarnameList(children), pos + 2);
}
}
else if (d == '{')
{
- return ParseProjection(toks, ii+1);
+ return ParseProjection(toks, pos+1);
}
else if (d != '[')
{
throw new RemesPathException("Indexer must start with '.' or '[' or '{'");
}
- Indexer? indexer = null;
- object? last_tok = null;
+ Indexer indexer = null;
+ object last_tok = null;
JNode jlast_tok;
Dtype last_type = Dtype.UNKNOWN;
- t = toks[++ii];
+ t = toks[++pos];
if (t is Binop && ((Binop)t).name == "*")
{
// it was '*', indicating a star indexer
- nt = PeekNextToken(toks, ii);
+ nt = PeekNextToken(toks, pos);
if (nt is char && (char)nt == ']')
{
- return (new StarIndexer(), ii + 2);
+ return new Obj_Pos(new StarIndexer(), pos + 2);
}
throw new RemesPathException("Unacceptable first token '*' for indexer list");
}
- while (ii < toks.Count)
+ while (pos < toks.Count)
{
- t = toks[ii];
+ t = toks[pos];
if (t is char)
{
d = (char)t;
@@ -1343,7 +1388,7 @@ private static object GetSingleIndexerListValue(JNode ind)
throw new RemesPathException("Cannot have indexers with a mix of ints/slicers and " +
"strings/regexes");
}
- return (indexer, ii + 1);
+ return new Obj_Pos(indexer, pos + 1);
}
if (d == ',')
{
@@ -1365,13 +1410,15 @@ private static object GetSingleIndexerListValue(JNode ind)
children.Add(GetSingleIndexerListValue((JNode)last_tok));
last_tok = null;
last_type = Dtype.UNKNOWN;
- ii++;
+ pos++;
}
else if (d == ':')
{
if (last_tok == null)
{
- (last_tok, ii) = ParseSlicer(toks, ii, null);
+ Obj_Pos opo = ParseSlicer(toks, pos, null);
+ last_tok = opo.obj;
+ pos = opo.pos;
}
else if (last_tok is JNode)
{
@@ -1381,7 +1428,9 @@ private static object GetSingleIndexerListValue(JNode ind)
throw new RemesPathException($"Expected token other than ':' after {jlast_tok} " +
$"in an indexer");
}
- (last_tok, ii) = ParseSlicer(toks, ii, Convert.ToInt32(jlast_tok.value));
+ Obj_Pos opo = ParseSlicer(toks, pos, Convert.ToInt32(jlast_tok.value));
+ last_tok = opo.obj;
+ pos = opo.pos;
}
else
{
@@ -1401,21 +1450,23 @@ private static object GetSingleIndexerListValue(JNode ind)
else
{
// it's a new token of some sort
- (last_tok, ii) = ParseExprOrScalarFunc(toks, ii);
+ Obj_Pos opo = ParseExprOrScalarFunc(toks, pos);
+ last_tok = opo.obj;
+ pos = opo.pos;
last_type = ((JNode)last_tok).type;
}
}
throw new RemesPathException("Unterminated indexer");
}
- private (JNode node, int ii) ParseExprOrScalar(List toks, int ii)
+ private Obj_Pos ParseExprOrScalar(List toks, int pos)
{
if (toks.Count == 0)
{
throw new RemesPathException("Empty query");
}
- object t = toks[ii];
- JNode? last_tok = null;
+ object t = toks[pos];
+ JNode last_tok = null;
if (t is Binop)
{
throw new RemesPathException($"Binop {(Binop)t} without appropriate left operand");
@@ -1425,11 +1476,11 @@ private static object GetSingleIndexerListValue(JNode ind)
char d = (char)t;
if (d != '(')
{
- throw new RemesPathException($"Invalid token {d} at position {ii}");
+ throw new RemesPathException($"Invalid token {d} at position {pos}");
}
int unclosed_parens = 1;
List subquery = new List();
- for (int end = ii + 1; end < toks.Count; end++)
+ for (int end = pos + 1; end < toks.Count; end++)
{
object subtok = toks[end];
if (subtok is char)
@@ -1443,8 +1494,8 @@ private static object GetSingleIndexerListValue(JNode ind)
{
if (--unclosed_parens == 0)
{
- (last_tok, _) = ParseExprOrScalarFunc(subquery, 0);
- ii = end + 1;
+ last_tok = (JNode)ParseExprOrScalarFunc(subquery, 0).obj;
+ pos = end + 1;
break;
}
}
@@ -1454,12 +1505,14 @@ private static object GetSingleIndexerListValue(JNode ind)
}
else if (t is ArgFunction)
{
- (last_tok, ii) = ParseArgFunction(toks, ii+1, (ArgFunction)t);
+ Obj_Pos opo = ParseArgFunction(toks, pos+1, (ArgFunction)t);
+ last_tok = (JNode)opo.obj;
+ pos = opo.pos;
}
else
{
last_tok = (JNode)t;
- ii++;
+ pos++;
}
if (last_tok == null)
{
@@ -1469,27 +1522,28 @@ private static object GetSingleIndexerListValue(JNode ind)
{
// the last token is an iterable, so now we look for indexers that slice it
var idxrs = new List();
- object? nt = PeekNextToken(toks, ii - 1);
- object? nt2, nt3;
+ object nt = PeekNextToken(toks, pos - 1);
+ object nt2, nt3;
while (nt != null && nt is char && INDEXER_STARTERS.Contains((char)nt))
{
- nt2 = PeekNextToken(toks, ii);
+ nt2 = PeekNextToken(toks, pos);
bool is_recursive = false;
if (nt2 is char && (char)nt2 == '.' && (char)nt == '.')
{
is_recursive = true;
- nt3 = PeekNextToken(toks, ii + 1);
- ii += (nt3 is char && (char)nt3 == '[') ? 2 : 1;
+ nt3 = PeekNextToken(toks, pos + 1);
+ pos += (nt3 is char && (char)nt3 == '[') ? 2 : 1;
}
- Indexer cur_idxr;
- (cur_idxr, ii) = ParseIndexer(toks, ii);
- nt = PeekNextToken(toks, ii - 1);
+ Obj_Pos opo= ParseIndexer(toks, pos);
+ Indexer cur_idxr = (Indexer)opo.obj;
+ pos = opo.pos;
+ nt = PeekNextToken(toks, pos - 1);
bool is_varname_list = cur_idxr is VarnameList;
bool has_one_option = false;
bool is_projection = false;
if (is_varname_list || cur_idxr is SlicerList)
{
- List? children = null;
+ List children = null;
if (is_varname_list)
{
children = ((VarnameList)cur_idxr).children;
@@ -1511,18 +1565,18 @@ private static object GetSingleIndexerListValue(JNode ind)
has_one_option = true;
}
}
- Func> idx_func = ApplyMultiIndex(children, is_varname_list, is_recursive);
+ Func> idx_func = ApplyMultiIndex(children, is_varname_list, is_recursive);
idxrs.Add(new IndexerFunc(idx_func, has_one_option, is_projection, is_varname_list, is_recursive));
}
else if (cur_idxr is BooleanIndex)
{
object boodex_fun = ((BooleanIndex)cur_idxr).value;
- Func> idx_func = ApplyBooleanIndex((JNode)boodex_fun);
+ Func> idx_func = ApplyBooleanIndex((JNode)boodex_fun);
idxrs.Add(new IndexerFunc(idx_func, has_one_option, is_projection, is_varname_list, is_recursive));
}
else if (cur_idxr is Projection)
{
- Func> proj_func = ((Projection)cur_idxr).proj_func;
+ Func> proj_func = ((Projection)cur_idxr).proj_func;
idxrs.Add(new IndexerFunc(proj_func, false, true, false, false));
}
else
@@ -1540,47 +1594,47 @@ JNode idx_func(JNode inp)
{
return ApplyIndexerList(idxrs)(lcur.function(inp));
}
- return (new CurJson(lcur.type, idx_func), ii);
+ return new Obj_Pos(new CurJson(lcur.type, idx_func), pos);
}
if (last_tok is JObject)
{
- return (ApplyIndexerList(idxrs)((JObject)last_tok), ii);
+ return new Obj_Pos(ApplyIndexerList(idxrs)((JObject)last_tok), pos);
}
- return (ApplyIndexerList(idxrs)((JArray)last_tok), ii);
+ return new Obj_Pos(ApplyIndexerList(idxrs)((JArray)last_tok), pos);
}
}
- return (last_tok, ii);
+ return new Obj_Pos(last_tok, pos);
}
- private (JNode node, int ii) ParseExprOrScalarFunc(List toks, int ii)
+ private Obj_Pos ParseExprOrScalarFunc(List toks, int pos)
{
- object? curtok = null;
- object? nt = PeekNextToken(toks, ii);
+ object curtok = null;
+ object nt = PeekNextToken(toks, pos);
// most common case is a single JNode followed by the end of the query or an expr func ender
// e.g., in @[0,1,2], all of 0, 1, and 2 are immediately followed by an expr func ender
// and in @.foo.bar the bar is followed by EOF
// MAKE THE COMMON CASE FAST!
if (nt == null || (nt is char && EXPR_FUNC_ENDERS.Contains((char)nt)))
{
- curtok = toks[ii];
+ curtok = toks[pos];
if (!(curtok is JNode))
{
throw new RemesPathException($"Invalid token {curtok} where JNode expected");
}
- return ((JNode)curtok, ii + 1);
+ return new Obj_Pos((JNode)curtok, pos + 1);
}
bool uminus = false;
- object? left_tok = null;
- object? left_operand = null;
+ object left_tok = null;
+ object left_operand = null;
float left_precedence = float.MinValue;
BinopWithArgs root = null;
BinopWithArgs leaf = null;
object[] children = new object[2];
Binop func;
- while (ii < toks.Count)
+ while (pos < toks.Count)
{
left_tok = curtok;
- curtok = toks[ii];
+ curtok = toks[pos];
if (curtok is char && EXPR_FUNC_ENDERS.Contains((char)curtok))
{
if (left_tok == null)
@@ -1644,7 +1698,7 @@ JNode idx_func(JNode inp)
}
left_precedence = func.precedence;
}
- ii++;
+ pos++;
}
else
{
@@ -1652,10 +1706,12 @@ JNode idx_func(JNode inp)
{
throw new RemesPathException("Can't have two iterables or scalars unseparated by a binop");
}
- (left_operand, ii) = ParseExprOrScalar(toks, ii);
+ Obj_Pos opo = ParseExprOrScalar(toks, pos);
+ left_operand = opo.obj;
+ pos = opo.pos;
if (uminus)
{
- nt = PeekNextToken(toks, ii - 1);
+ nt = PeekNextToken(toks, pos - 1);
if (!(nt != null && nt is Binop && ((Binop)nt).name == "**"))
{
// applying unary minus to this expr/scalar has higher precedence than everything except
@@ -1678,30 +1734,32 @@ JNode idx_func(JNode inp)
{
throw new RemesPathException("Null return from ParseExprOrScalar");
}
- return ((JNode)left_operand, ii);
+ return new Obj_Pos((JNode)left_operand, pos);
}
- private (JNode node, int ii) ParseArgFunction(List toks, int ii, ArgFunction fun)
+ private Obj_Pos ParseArgFunction(List toks, int pos, ArgFunction fun)
{
- object t = toks[ii];
+ object t = toks[pos];
if (!(t is char && (char)t == '('))
{
throw new RemesPathException($"Function {fun.name} must have parens surrounding arguments");
}
- ii++;
+ pos++;
int arg_num = 0;
Dtype[] intypes = fun.input_types();
JNode[] args = new JNode[fun.max_args];
- JNode? cur_arg = null;
- while (ii < toks.Count)
+ JNode cur_arg = null;
+ while (pos < toks.Count)
{
- t = toks[ii];
+ t = toks[pos];
Dtype type_options = intypes[arg_num];
try
{
try
{
- (cur_arg, ii) = ParseExprOrScalarFunc(toks, ii);
+ Obj_Pos opo = ParseExprOrScalarFunc(toks, pos);
+ cur_arg = (JNode)opo.obj;
+ pos = opo.pos;
}
catch
{
@@ -1709,7 +1767,7 @@ JNode idx_func(JNode inp)
}
if ((Dtype.SLICE & type_options) != 0)
{
- object? nt = PeekNextToken(toks, ii - 1);
+ object nt = PeekNextToken(toks, pos - 1);
if (nt is char && (char)nt == ':')
{
int? first_num;
@@ -1721,7 +1779,9 @@ JNode idx_func(JNode inp)
{
first_num = Convert.ToInt32(cur_arg.value);
}
- (cur_arg, ii) = ParseSlicer(toks, ii, first_num);
+ Obj_Pos opo = ParseSlicer(toks, pos, first_num);
+ cur_arg = (JNode)opo.obj;
+ pos = opo.pos;
}
}
if (cur_arg == null || (cur_arg.type & type_options) == 0)
@@ -1736,7 +1796,7 @@ JNode idx_func(JNode inp)
throw new RemesPathException($"For arg {arg_num} of function {fun.name}, expected argument of type "
+ $"in {type_options}, instead threw exception {ex}.");
}
- t = toks[ii];
+ t = toks[pos];
bool comma = false;
bool close_paren = false;
if (t is char)
@@ -1760,7 +1820,7 @@ JNode idx_func(JNode inp)
$"({fun.min_args} - {fun.max_args} args)");
}
args[arg_num++] = cur_arg;
- ii++;
+ pos++;
if (close_paren)
{
var withargs = new ArgFunctionWithArgs(fun, args);
@@ -1769,22 +1829,23 @@ JNode idx_func(JNode inp)
// fill the remaining args with null nodes; alternatively we could have ArgFunctions use JNode?[] instead of JNode[]
args[arg2] = new JNode(null, Dtype.NULL, 0);
}
- return (ApplyArgFunction(withargs), ii);
+ return new Obj_Pos(ApplyArgFunction(withargs), pos);
}
}
throw new RemesPathException($"Expected ')' after argument {arg_num} of function {fun.name} "
+ $"({fun.min_args} - {fun.max_args} args)");
}
- private (Indexer proj, int ii) ParseProjection(List toks, int ii)
+ private Obj_Pos ParseProjection(List toks, int pos)
{
- var children = new List<(object, JNode)>();
+ var children = new List();
bool is_object_proj = false;
- while (ii < toks.Count)
+ while (pos < toks.Count)
{
- JNode key;
- (key, ii) = ParseExprOrScalarFunc(toks, ii);
- object? nt = PeekNextToken(toks, ii - 1);
+ Obj_Pos opo = ParseExprOrScalarFunc(toks, pos);
+ JNode key = (JNode)opo.obj;
+ pos = opo.pos;
+ object nt = PeekNextToken(toks, pos - 1);
if (nt is char)
{
char nd = (char)nt;
@@ -1796,11 +1857,12 @@ JNode idx_func(JNode inp)
}
if (key.type == Dtype.STR)
{
- JNode val;
- (val, ii) = ParseExprOrScalarFunc(toks, ii + 1);
- children.Add(((string)key.value, val));
+ opo = ParseExprOrScalarFunc(toks, pos + 1);
+ JNode val = (JNode)opo.obj;
+ pos = opo.pos;
+ children.Add(new Key_Node((string)key.value, val));
is_object_proj = true;
- nt = PeekNextToken(toks, ii - 1);
+ nt = PeekNextToken(toks, pos - 1);
if (!(nt is char))
{
throw new RemesPathException("Key-value pairs in projection must be delimited by ',' and projections must end with '}'.");
@@ -1815,18 +1877,20 @@ JNode idx_func(JNode inp)
else
{
// it's an array projection
- children.Add((0, key));
+ children.Add(new Key_Node(0, key));
}
if (nd == '}')
{
- IEnumerable<(object, JNode)> proj_func(JNode obj)
+ IEnumerable proj_func(JNode obj)
{
- foreach((object k, JNode v) in children)
+ foreach(Key_Node kv in children)
{
- yield return (k, (v is CurJson) ? ((CurJson)v).function(obj) : v);
+ object k = kv.obj;
+ JNode v = kv.node;
+ yield return new Key_Node(k, (v is CurJson) ? ((CurJson)v).function(obj) : v);
}
}
- return (new Projection(proj_func), ii + 1);
+ return new Obj_Pos(new Projection(proj_func), pos + 1);
}
if (nd != ',')
{
@@ -1837,7 +1901,7 @@ JNode idx_func(JNode inp)
{
throw new RemesPathException("Values or key-value pairs in a projection must be comma-delimited");
}
- ii++;
+ pos++;
}
throw new RemesPathException("Unterminated projection");
}
@@ -1899,35 +1963,38 @@ public class SliceTester
public static void Test()
{
int[] onetofive = new int[] { 1, 2, 3, 4, 5 };
- var testcases = new (int[] input, int? start, int? stop, int? stride, int[] desired)[]
- {
- (onetofive, 2, null, null, new int[]{1, 2}),
- (onetofive, null, null, null, onetofive),
- (onetofive, null, 1, null, new int[]{1}),
- (onetofive, 1, 3, null, new int[]{2, 3}),
- (onetofive, 1, 4, 2, new int[]{2, 4}),
- (onetofive, 2, null, -1, new int[]{3, 2, 1}),
- (onetofive, 4, 1, -2, new int[]{5, 3}),
- (onetofive, 1, null, 3, new int[]{2, 5}),
- (onetofive, 4, 2, -1, new int[]{5, 4}),
- (onetofive, -3, null, null, new int[]{1,2}),
- (onetofive, -4, -1, null, new int[]{2,3,4}),
- (onetofive, -4, null, 2, new int[]{2, 4}),
- (onetofive, null, -3, null, new int[]{1,2}),
- (onetofive, -3, null, 1, new int[]{3,4,5}),
- (onetofive, -3, null, -1, new int[]{3,2,1}),
- (onetofive, -1, 1, -2, new int[]{5, 3}),
- (onetofive, 1, -1, null, new int[]{2,3,4}),
- (onetofive, -4, 4, null, new int[]{2,3,4}),
- (onetofive, -4, 4, 2, new int[]{2, 4}),
- (onetofive, 2, -2, 2, new int[]{3}),
- (onetofive, -4, null, -2, new int[]{2}),
- (onetofive, 2, 1, null, new int[]{ })
+ var testcases = new object[][]
+ {
+ new object[]{ onetofive, 2, null, null, new int[]{1, 2} },
+ new object[]{ onetofive, null, null, null, onetofive },
+ new object[]{ onetofive, null, 1, null, new int[]{1} },
+ new object[]{ onetofive, 1, 3, null, new int[]{2, 3} },
+ new object[]{ onetofive, 1, 4, 2, new int[]{2, 4} },
+ new object[]{ onetofive, 2, null, -1, new int[]{3, 2, 1} },
+ new object[]{ onetofive, 4, 1, -2, new int[]{5, 3} },
+ new object[]{ onetofive, 1, null, 3, new int[]{2, 5} },
+ new object[]{ onetofive, 4, 2, -1, new int[]{5, 4} },
+ new object[]{ onetofive, -3, null, null, new int[]{1,2} },
+ new object[]{ onetofive, -4, -1, null, new int[]{2,3,4} },
+ new object[]{ onetofive, -4, null, 2, new int[]{2, 4} },
+ new object[]{ onetofive, null, -3, null, new int[]{1,2} },
+ new object[]{ onetofive, -3, null, 1, new int[]{3,4,5} },
+ new object[]{ onetofive, -3, null, -1, new int[]{3,2,1} },
+ new object[]{ onetofive, -1, 1, -2, new int[]{5, 3} },
+ new object[]{ onetofive, 1, -1, null, new int[]{2,3,4} },
+ new object[]{ onetofive, -4, 4, null, new int[]{2,3,4} },
+ new object[]{ onetofive, -4, 4, 2, new int[]{2, 4} },
+ new object[]{ onetofive, 2, -2, 2, new int[]{3} },
+ new object[]{ onetofive, -4, null, -2, new int[]{2} },
+ new object[]{ onetofive, 2, 1, null, new int[]{ } }
};
+ //(int[] input, int start, int stop, int stride, int[] desired)
int tests_failed = 0;
int ii = 0;
- foreach ((int[] input, int? start, int? stop, int? stride, int[] desired) in testcases)
+ foreach (object[] stuff in testcases)
{
+ int[] input = (int[])stuff[0], desired = (int[])stuff[4];
+ int start = (int)stuff[1], stop = (int)stuff[2], stride = (int)stuff[3];
int[] output = (int[])input.Slice(start, stop, stride);
// verify that it works for both arrays and Lists, because both implement IList
List list_output = (List)(new List(input)).Slice(start, stop, stride);
@@ -1977,39 +2044,42 @@ public static void Test()
}
ii++;
}
- var str_testcases = new (int[] input, string slicer, int[] desired)[]
- {
- (onetofive, "2", new int[]{1, 2}),
- (onetofive, ":", onetofive),
- (onetofive, ":1", new int[]{1}),
- (onetofive, "1:3", new int[]{2, 3}),
- (onetofive, "1::3", new int[]{2, 5}),
- (onetofive, "1:4:2", new int[]{2, 4}),
- (onetofive, "2::-1", new int[]{3, 2, 1}),
- (onetofive, "4:1:-2", new int[]{5, 3}),
- (onetofive, "1::3", new int[]{2, 5}),
- (onetofive, "4:2:-1", new int[]{5, 4}),
- (onetofive, "-3", new int[]{1,2}),
- (onetofive, "-4:-1", new int[]{2,3,4}),
- (onetofive, "-4::2", new int[]{2, 4}),
- (onetofive, ":-3", new int[]{1,2}),
- (onetofive, "-3::1", new int[]{3,4,5}),
- (onetofive, "-3:", new int[]{3,4,5}),
- (onetofive, "-3::-1", new int[]{3,2,1}),
- (onetofive, "-1:1:-2", new int[]{5, 3}),
- (onetofive, "1:-1", new int[]{2,3,4}),
- (onetofive, "-4:4", new int[]{2,3,4}),
- (onetofive, "-4:4:2", new int[]{2, 4}),
- (onetofive, "2:-2:2", new int[]{3}),
- (onetofive, "3::5", new int[]{4}),
- (onetofive, "5:", new int[]{ }),
- (onetofive, "3:8", new int[]{4, 5}),
- (onetofive, "-2:15", new int[]{4,5})
+ var str_testcases = new object[][]
+ {
+ new object[]{ onetofive, "2", new int[]{1, 2} },
+ new object[]{ onetofive, ":", onetofive },
+ new object[]{ onetofive, ":1", new int[]{1} },
+ new object[]{ onetofive, "1:3", new int[]{2, 3} },
+ new object[]{ onetofive, "1::3", new int[]{2, 5} },
+ new object[]{ onetofive, "1:4:2", new int[]{2, 4} },
+ new object[]{ onetofive, "2::-1", new int[]{3, 2, 1} },
+ new object[]{ onetofive, "4:1:-2", new int[]{5, 3} },
+ new object[]{ onetofive, "1::3", new int[]{2, 5} },
+ new object[]{ onetofive, "4:2:-1", new int[]{5, 4} },
+ new object[]{ onetofive, "-3", new int[]{1,2} },
+ new object[]{ onetofive, "-4:-1", new int[]{2,3,4} },
+ new object[]{ onetofive, "-4::2", new int[]{2, 4} },
+ new object[]{ onetofive, ":-3", new int[]{1,2} },
+ new object[]{ onetofive, "-3::1", new int[]{3,4,5} },
+ new object[]{ onetofive, "-3:", new int[]{3,4,5} },
+ new object[]{ onetofive, "-3::-1", new int[]{3,2,1} },
+ new object[]{ onetofive, "-1:1:-2", new int[]{5, 3} },
+ new object[]{ onetofive, "1:-1", new int[]{2,3,4} },
+ new object[]{ onetofive, "-4:4", new int[]{2,3,4} },
+ new object[]{ onetofive, "-4:4:2", new int[]{2, 4} },
+ new object[]{ onetofive, "2:-2:2", new int[]{3} },
+ new object[]{ onetofive, "3::5", new int[]{4} },
+ new object[]{ onetofive, "5:", new int[]{ } },
+ new object[]{ onetofive, "3:8", new int[]{4, 5} },
+ new object[]{ onetofive, "-2:15", new int[]{4,5} }
};
// test string slicer
- foreach ((int[] input, string slicer, int[] desired) in str_testcases)
+ foreach (object[] inp_sli_desired in str_testcases)
{
- int[] output = (int[])input.Slice(slicer);
+ int[] inp = (int[])inp_sli_desired[0];
+ string slicer = (string)inp_sli_desired[1];
+ int[] desired = (int[])inp_sli_desired[2];
+ int[] output = (int[])inp.Slice(slicer);
var sb_desired = new StringBuilder();
sb_desired.Append('{');
foreach (int desired_value in desired)
@@ -2034,7 +2104,7 @@ public static void Test()
tests_failed++;
Console.WriteLine(String.Format("Test {0} ({1}.Slice(\"{2}\")) failed:\n" +
"Expected\n{3}\nGot\n{4}",
- ii+1, input, slicer, str_desired, str_output));
+ ii+1, inp, slicer, str_desired, str_output));
}
ii++;
}
@@ -2046,6 +2116,18 @@ public static void Test()
public class RemesParserTester
{
+ public struct Query_DesiredResult
+ {
+ public string query;
+ public string desired_result;
+
+ public Query_DesiredResult(string query, string desired_result)
+ {
+ this.query = query;
+ this.desired_result = desired_result;
+ }
+ }
+
public static void Test()
{
JsonParser jsonParser = new JsonParser();
@@ -2054,173 +2136,173 @@ public static void Test()
"\"quz\": {}, \"jub\": [], \"guzo\": [[[1]], [[2], [3]]], \"7\": [{\"foo\": 2}, 1], \"_\": {\"0\": 0}}");
RemesParser remesparser = new RemesParser();
Console.WriteLine($"The queried JSON in the RemesParser tests is:{foo.ToString()}");
- var testcases = new (string query, string desired_result)[]
+ var testcases = new Query_DesiredResult[]
{
// binop precedence tests
- ("2 - 4 * 3.5", "-12.0"),
- ("2 / 3 - 4 * 5 ** 1", "-58/3"),
- ("5 ** (6 - 2)", "625.0"),
+ new Query_DesiredResult("2 - 4 * 3.5", "-12.0"),
+ new Query_DesiredResult("2 / 3 - 4 * 5 ** 1", "-58/3"),
+ new Query_DesiredResult("5 ** (6 - 2)", "625.0"),
// binop two jsons, binop json scalar, binop scalar json tests
- ("@.foo[0] + @.foo[1]", "[3.0, 5.0, 7.0]"),
- ("@.foo[0] + j`[3.0, 4.0, 5.0]`", "[3.0, 5.0, 7.0]"),
- ("j`[0, 1, 2]` + @.foo[1]", "[3.0, 5.0, 7.0]"),
- ("1 + @.foo[0]", "[1, 2, 3]"),
- ("@.foo[0] + 1", "[1, 2, 3]"),
- ("1 + j`[0, 1, 2]`", "[1, 2, 3]"),
- ("j`[0, 1, 2]` + 1", "[1, 2, 3]"),
- ("`a` + str(range(3))", "[\"a0\", \"a1\", \"a2\"]"),
- ("str(range(3)) + `a`", "[\"0a\", \"1a\", \"2a\"]"),
- ("str(@.foo[0]) + `a`", "[\"0a\", \"1a\", \"2a\"]"),
- ("`a` + str(@.foo[0])", "[\"a0\", \"a1\", \"a2\"]"),
+ new Query_DesiredResult("@.foo[0] + @.foo[1]", "[3.0, 5.0, 7.0]"),
+ new Query_DesiredResult("@.foo[0] + j`[3.0, 4.0, 5.0]`", "[3.0, 5.0, 7.0]"),
+ new Query_DesiredResult("j`[0, 1, 2]` + @.foo[1]", "[3.0, 5.0, 7.0]"),
+ new Query_DesiredResult("1 + @.foo[0]", "[1, 2, 3]"),
+ new Query_DesiredResult("@.foo[0] + 1", "[1, 2, 3]"),
+ new Query_DesiredResult("1 + j`[0, 1, 2]`", "[1, 2, 3]"),
+ new Query_DesiredResult("j`[0, 1, 2]` + 1", "[1, 2, 3]"),
+ new Query_DesiredResult("`a` + str(range(3))", "[\"a0\", \"a1\", \"a2\"]"),
+ new Query_DesiredResult("str(range(3)) + `a`", "[\"0a\", \"1a\", \"2a\"]"),
+ new Query_DesiredResult("str(@.foo[0]) + `a`", "[\"0a\", \"1a\", \"2a\"]"),
+ new Query_DesiredResult("`a` + str(@.foo[0])", "[\"a0\", \"a1\", \"a2\"]"),
// uminus tests
- ("-j`[1]`", "[-1]"),
- ("-j`[1,2]`**-3", "[-1.0, -1/8]"),
- ("-@.foo[2]", "[-6.0, -7.0, -8.0]"),
- ("2/--3", "2/3"),
+ new Query_DesiredResult("-j`[1]`", "[-1]"),
+ new Query_DesiredResult("-j`[1,2]`**-3", "[-1.0, -1/8]"),
+ new Query_DesiredResult("-@.foo[2]", "[-6.0, -7.0, -8.0]"),
+ new Query_DesiredResult("2/--3", "2/3"),
// indexing tests
- ("@.baz", "\"z\""),
- ("@.foo[0]", "[0, 1, 2]"),
- ("@[g`^b`]", "{\"bar\": {\"a\": false, \"b\": [\"a`g\", \"bah\"]}, \"baz\": \"z\"}"),
- ("@.foo[1][@ > 3.5]", "[4.0, 5.0]"),
- ("@.foo[-2:]", "[[3.0, 4.0, 5.0], [6.0, 7.0, 8.0]]"),
- ("@.foo[:3:2]", "[[0, 1, 2], [6.0, 7.0, 8.0]]"),
- ("@[foo, jub]", "{\"foo\": [[0, 1, 2], [3.0, 4.0, 5.0], [6.0, 7.0, 8.0]], \"jub\": []}"),
- ("@[foo, jub][2]", "{\"foo\": [6.0, 7.0, 8.0]}"),
- ("@[foo][0][0,2]", "[0, 2]"),
- ("@[foo][0][0, 2:]", "[0, 2]"),
- ("@[foo][0][2:, 0]", "[2, 0]"),
- ("@[foo][0][0, 2:, 1] ", "[0, 2, 1]"),
- ("@[foo][0][:1, 2:]", "[0, 2]"),
- ("@[foo][0][0, 2:4]", "[0, 2]"),
- ("@[foo][0][3:, 0]", "[0]"),
- ("@.*", foo.ToString()),
- ("@.foo[*]", "[[0, 1, 2], [3.0, 4.0, 5.0], [6.0, 7.0, 8.0]]"),
- ("@.foo[:2][2*@[0] >= @[1]]", "[[3.0, 4.0, 5.0]]"),
- ("@.foo[-1]", "[6.0, 7.0, 8.0]"),
- ("@.g`[a-z]oo`", "{\"foo\": [[0, 1, 2], [3.0, 4.0, 5.0], [6.0, 7.0, 8.0]]}"),
+ new Query_DesiredResult("@.baz", "\"z\""),
+ new Query_DesiredResult("@.foo[0]", "[0, 1, 2]"),
+ new Query_DesiredResult("@[g`^b`]", "{\"bar\": {\"a\": false, \"b\": [\"a`g\", \"bah\"]}, \"baz\": \"z\"}"),
+ new Query_DesiredResult("@.foo[1][@ > 3.5]", "[4.0, 5.0]"),
+ new Query_DesiredResult("@.foo[-2:]", "[[3.0, 4.0, 5.0], [6.0, 7.0, 8.0]]"),
+ new Query_DesiredResult("@.foo[:3:2]", "[[0, 1, 2], [6.0, 7.0, 8.0]]"),
+ new Query_DesiredResult("@[foo, jub]", "{\"foo\": [[0, 1, 2], [3.0, 4.0, 5.0], [6.0, 7.0, 8.0]], \"jub\": []}"),
+ new Query_DesiredResult("@[foo, jub][2]", "{\"foo\": [6.0, 7.0, 8.0]}"),
+ new Query_DesiredResult("@[foo][0][0,2]", "[0, 2]"),
+ new Query_DesiredResult("@[foo][0][0, 2:]", "[0, 2]"),
+ new Query_DesiredResult("@[foo][0][2:, 0]", "[2, 0]"),
+ new Query_DesiredResult("@[foo][0][0, 2:, 1] ", "[0, 2, 1]"),
+ new Query_DesiredResult("@[foo][0][:1, 2:]", "[0, 2]"),
+ new Query_DesiredResult("@[foo][0][0, 2:4]", "[0, 2]"),
+ new Query_DesiredResult("@[foo][0][3:, 0]", "[0]"),
+ new Query_DesiredResult("@.*", foo.ToString()),
+ new Query_DesiredResult("@.foo[*]", "[[0, 1, 2], [3.0, 4.0, 5.0], [6.0, 7.0, 8.0]]"),
+ new Query_DesiredResult("@.foo[:2][2*@[0] >= @[1]]", "[[3.0, 4.0, 5.0]]"),
+ new Query_DesiredResult("@.foo[-1]", "[6.0, 7.0, 8.0]"),
+ new Query_DesiredResult("@.g`[a-z]oo`", "{\"foo\": [[0, 1, 2], [3.0, 4.0, 5.0], [6.0, 7.0, 8.0]]}"),
// ufunction tests
- ("len(@)", ((JObject)foo).Length.ToString()),
- ("s_mul(@.bar.b, 2)", "[\"a`ga`g\", \"bahbah\"]"),
- ("in(1, @.foo[0])", "true"),
- ("in(4.0, @.foo[0])", "false"),
- ("in(`foo`, @)", "true"),
- ("in(`fjdkfjdkuren`, @)", "false"),
- ("range(2, len(@)) * 3", "[6, 9, 12, 15, 18, 21]"),
- ("sort_by(@.foo, 0, true)[:2]", "[[6.0, 7.0, 8.0], [3.0, 4.0, 5.0]]"),
- ("mean(flatten(@.foo[0]))", "1.0"),
- ("flatten(@.foo)[:4]", "[0, 1, 2, 3.0]"),
- ("flatten(@.guzo, 2)", "[1, 2, 3]"),
- ("min_by(@.foo, 1)", "[0, 1, 2]"),
- ("s_sub(@.bar.b, g`a(\\`?)`, `$1z`)", "[\"`zg\", \"bzh\"]"),
- ("isna(@.foo[0])", "[false, false, false]"),
- ("s_slice(@.bar.b, 2)", "[\"g\", \"h\"]"),
- ("s_slice(@.bar.b, ::2)", "[\"ag\", \"bh\"]"),
- ("str(@.foo[2])", "[\"6.0\", \"7.0\", \"8.0\"]"),
- ("int(@.foo[1])", "[3, 4, 5]"),
- ("s_slice(str(@.foo[2]), 2:)", "[\"0\", \"0\", \"0\"]"),
- ("sorted(flatten(@.guzo, 2))", "[1, 2, 3]"),
- ("keys(@)", "[\"foo\", \"bar\", \"baz\", \"quz\", \"jub\", \"guzo\", \"7\", \"_\"]"),
- ("values(@.bar)[:]", "[false, [\"a`g\", \"bah\"]]"),
- ("s_join(`\t`, @.bar.b)", "\"a`g\tbah\""),
- ("sorted(unique(@.foo[1]), true)", "[5.0, 4.0, 3.0]"), // have to sort because this function involves a HashSet so order is random
- ("unique(@.foo[0], true)", "[0, 1, 2]"),
- ("sort_by(value_counts(@.foo[0]), 1)", "[[0, 1], [1, 1], [2, 1]]"), // function involves a Dictionary so order is inherently random
- ("sort_by(value_counts(j`[1, 2, 1, 3, 1]`), 0)", "[[1, 3], [2, 1], [3, 1]]"),
- ("quantile(flatten(@.foo[1:]), 0.5)", "5.5"),
- ("float(@.foo[0])[:1]", "[0.0]"),
- ("not(is_expr(values(@.bar)))", "[true, false]"),
- ("round(@.foo[0] * 1.66)", "[0, 2, 3]"),
- ("round(@.foo[0] * 1.66, 1)", "[0.0, 1.7, 3.3]"),
- ("round(@.foo[0] * 1.66, 2)", "[0.0, 1.66, 3.32]"),
- ("s_find(@.bar.b, g`[a-z]+`)", "[[\"a\", \"g\"], [\"bah\"]]"),
- ("s_count(@.bar.b, `a`)", "[1, 1]"),
- ("s_count(@.bar.b, g`[a-z]`)", "[2, 3]"),
- ("ifelse(@.foo[0] > quantile(@.foo[0], 0.5), `big`, `small`)", "[\"small\", \"small\", \"big\"]"),
- ("ifelse(is_num(j`[1, \"a\", 2.0]`), isnum, notnum)", "[\"isnum\", \"notnum\", \"isnum\"]"),
- ("s_upper(j`[\"hello\", \"world\"]`)", "[\"HELLO\", \"WORLD\"]"),
- ("s_strip(` a dog!\t`)", "\"a dog!\""),
- ("log(@.foo[0] + 1)", $"[0.0, {Math.Log(2)}, {Math.Log(3)}]"),
- ("log2(@.foo[1])", $"[{Math.Log2(3)}, 2.0, {Math.Log2(5)}]"),
- ("abs(j`[-1, 0, 1]`)", "[1, 0, 1]"),
- ("is_str(@.bar.b)", "[true, true]"),
- ("s_split(@.bar.b[0], g`[^a-z]+`)", "[\"a\", \"g\"]"),
- ("s_split(@.bar.b, `a`)", "[[\"\", \"`g\"], [\"b\", \"h\"]]"),
- ("group_by(@.foo, 0)", "{\"0\": [[0, 1, 2]], \"3.0\": [[3.0, 4.0, 5.0]], \"6.0\": [[6.0, 7.0, 8.0]]}"),
- ("group_by(j`[{\"foo\": 1, \"bar\": \"a\"}, {\"foo\": 2, \"bar\": \"b\"}, {\"foo\": 3, \"bar\": \"b\"}]`, bar).*{`sum`: sum(@[:].foo), `count`: len(@)}", "{\"a\": {\"sum\": 1.0, \"count\": 1}, \"b\": {\"sum\": 5.0, \"count\": 2}}"),
+ new Query_DesiredResult("len(@)", ((JObject)foo).Length.ToString()),
+ new Query_DesiredResult("s_mul(@.bar.b, 2)", "[\"a`ga`g\", \"bahbah\"]"),
+ new Query_DesiredResult("in(1, @.foo[0])", "true"),
+ new Query_DesiredResult("in(4.0, @.foo[0])", "false"),
+ new Query_DesiredResult("in(`foo`, @)", "true"),
+ new Query_DesiredResult("in(`fjdkfjdkuren`, @)", "false"),
+ new Query_DesiredResult("range(2, len(@)) * 3", "[6, 9, 12, 15, 18, 21]"),
+ new Query_DesiredResult("sort_by(@.foo, 0, true)[:2]", "[[6.0, 7.0, 8.0], [3.0, 4.0, 5.0]]"),
+ new Query_DesiredResult("mean(flatten(@.foo[0]))", "1.0"),
+ new Query_DesiredResult("flatten(@.foo)[:4]", "[0, 1, 2, 3.0]"),
+ new Query_DesiredResult("flatten(@.guzo, 2)", "[1, 2, 3]"),
+ new Query_DesiredResult("min_by(@.foo, 1)", "[0, 1, 2]"),
+ new Query_DesiredResult("s_sub(@.bar.b, g`a(\\`?)`, `$1z`)", "[\"`zg\", \"bzh\"]"),
+ new Query_DesiredResult("isna(@.foo[0])", "[false, false, false]"),
+ new Query_DesiredResult("s_slice(@.bar.b, 2)", "[\"g\", \"h\"]"),
+ new Query_DesiredResult("s_slice(@.bar.b, ::2)", "[\"ag\", \"bh\"]"),
+ new Query_DesiredResult("str(@.foo[2])", "[\"6.0\", \"7.0\", \"8.0\"]"),
+ new Query_DesiredResult("int(@.foo[1])", "[3, 4, 5]"),
+ new Query_DesiredResult("s_slice(str(@.foo[2]), 2:)", "[\"0\", \"0\", \"0\"]"),
+ new Query_DesiredResult("sorted(flatten(@.guzo, 2))", "[1, 2, 3]"),
+ new Query_DesiredResult("keys(@)", "[\"foo\", \"bar\", \"baz\", \"quz\", \"jub\", \"guzo\", \"7\", \"_\"]"),
+ new Query_DesiredResult("values(@.bar)[:]", "[false, [\"a`g\", \"bah\"]]"),
+ new Query_DesiredResult("s_join(`\t`, @.bar.b)", "\"a`g\tbah\""),
+ new Query_DesiredResult("sorted(unique(@.foo[1]), true)", "[5.0, 4.0, 3.0]"), // have to sort because this function involves a HashSet so order is random
+ new Query_DesiredResult("unique(@.foo[0], true)", "[0, 1, 2]"),
+ new Query_DesiredResult("sort_by(value_counts(@.foo[0]), 1)", "[[0, 1], [1, 1], [2, 1]]"), // function involves a Dictionary so order is inherently random
+ new Query_DesiredResult("sort_by(value_counts(j`[1, 2, 1, 3, 1]`), 0)", "[[1, 3], [2, 1], [3, 1]]"),
+ new Query_DesiredResult("quantile(flatten(@.foo[1:]), 0.5)", "5.5"),
+ new Query_DesiredResult("float(@.foo[0])[:1]", "[0.0]"),
+ new Query_DesiredResult("not(is_expr(values(@.bar)))", "[true, false]"),
+ new Query_DesiredResult("round(@.foo[0] * 1.66)", "[0, 2, 3]"),
+ new Query_DesiredResult("round(@.foo[0] * 1.66, 1)", "[0.0, 1.7, 3.3]"),
+ new Query_DesiredResult("round(@.foo[0] * 1.66, 2)", "[0.0, 1.66, 3.32]"),
+ new Query_DesiredResult("s_find(@.bar.b, g`[a-z]+`)", "[[\"a\", \"g\"], [\"bah\"]]"),
+ new Query_DesiredResult("s_count(@.bar.b, `a`)", "[1, 1]"),
+ new Query_DesiredResult("s_count(@.bar.b, g`[a-z]`)", "[2, 3]"),
+ new Query_DesiredResult("ifelse(@.foo[0] > quantile(@.foo[0], 0.5), `big`, `small`)", "[\"small\", \"small\", \"big\"]"),
+ new Query_DesiredResult("ifelse(is_num(j`[1, \"a\", 2.0]`), isnum, notnum)", "[\"isnum\", \"notnum\", \"isnum\"]"),
+ new Query_DesiredResult("s_upper(j`[\"hello\", \"world\"]`)", "[\"HELLO\", \"WORLD\"]"),
+ new Query_DesiredResult("s_strip(` a dog!\t`)", "\"a dog!\""),
+ new Query_DesiredResult("log(@.foo[0] + 1)", $"[0.0, {Math.Log(2)}, {Math.Log(3)}]"),
+ new Query_DesiredResult("log2(@.foo[1])", $"[{Math.Log(3, 2)}, 2.0, {Math.Log(5, 2)}]"),
+ new Query_DesiredResult("abs(j`[-1, 0, 1]`)", "[1, 0, 1]"),
+ new Query_DesiredResult("is_str(@.bar.b)", "[true, true]"),
+ new Query_DesiredResult("s_split(@.bar.b[0], g`[^a-z]+`)", "[\"a\", \"g\"]"),
+ new Query_DesiredResult("s_split(@.bar.b, `a`)", "[[\"\", \"`g\"], [\"b\", \"h\"]]"),
+ new Query_DesiredResult("group_by(@.foo, 0)", "{\"0\": [[0, 1, 2]], \"3.0\": [[3.0, 4.0, 5.0]], \"6.0\": [[6.0, 7.0, 8.0]]}"),
+ new Query_DesiredResult("group_by(j`[{\"foo\": 1, \"bar\": \"a\"}, {\"foo\": 2, \"bar\": \"b\"}, {\"foo\": 3, \"bar\": \"b\"}]`, bar).*{`sum`: sum(@[:].foo), `count`: len(@)}", "{\"a\": {\"sum\": 1.0, \"count\": 1}, \"b\": {\"sum\": 5.0, \"count\": 2}}"),
//("agg_by(@.foo, 0, sum(flatten(@)))", "{\"0\": 3.0, \"3.0\": 11.0, \"6.0\": 21.0}"),
- ("index(j`[1,3,2,3,1]`, max(j`[1,3,2,3,1]`), true)", "3"),
- ("index(@.foo[0], min(@.foo[0]))", "0"),
- ("zip(j`[1,2,3]`, j`[\"a\", \"b\", \"c\"]`)", "[[1, \"a\"], [2, \"b\"], [3, \"c\"]]"),
- ("zip(@.foo[0], @.foo[1], @.foo[2], j`[-20, -30, -40]`)", "[[0, 3.0, 6.0, -20], [1, 4.0, 7.0, -30], [2, 5.0, 8.0, -40]]"),
- ("dict(zip(keys(@.bar), j`[1, 2]`))", "{\"a\": 1, \"b\": 2}"),
- ("dict(items(@))", foo.ToString()),
- ("dict(j`[[\"a\", 1], [\"b\", 2], [\"c\", 3]]`)", "{\"a\": 1, \"b\": 2, \"c\": 3}"),
- ("items(j`{\"a\": 1, \"b\": 2, \"c\": 3}`)", "[[\"a\", 1], [\"b\", 2], [\"c\", 3]]"),
- ("isnull(@.foo)", "[false, false, false]"),
- ("int(isnull(j`[1, 1.5, [], \"a\", \"2000-07-19\", \"1975-07-14 01:48:21\", null, false, {}]`))",
+ new Query_DesiredResult("index(j`[1,3,2,3,1]`, max(j`[1,3,2,3,1]`), true)", "3"),
+ new Query_DesiredResult("index(@.foo[0], min(@.foo[0]))", "0"),
+ new Query_DesiredResult("zip(j`[1,2,3]`, j`[\"a\", \"b\", \"c\"]`)", "[[1, \"a\"], [2, \"b\"], [3, \"c\"]]"),
+ new Query_DesiredResult("zip(@.foo[0], @.foo[1], @.foo[2], j`[-20, -30, -40]`)", "[[0, 3.0, 6.0, -20], [1, 4.0, 7.0, -30], [2, 5.0, 8.0, -40]]"),
+ new Query_DesiredResult("dict(zip(keys(@.bar), j`[1, 2]`))", "{\"a\": 1, \"b\": 2}"),
+ new Query_DesiredResult("dict(items(@))", foo.ToString()),
+ new Query_DesiredResult("dict(j`[[\"a\", 1], [\"b\", 2], [\"c\", 3]]`)", "{\"a\": 1, \"b\": 2, \"c\": 3}"),
+ new Query_DesiredResult("items(j`{\"a\": 1, \"b\": 2, \"c\": 3}`)", "[[\"a\", 1], [\"b\", 2], [\"c\", 3]]"),
+ new Query_DesiredResult("isnull(@.foo)", "[false, false, false]"),
+ new Query_DesiredResult("int(isnull(j`[1, 1.5, [], \"a\", \"2000-07-19\", \"1975-07-14 01:48:21\", null, false, {}]`))",
"[0, 0, 0, 0, 0, 0, 1, 0, 0]"),
- ("range(-10)", "[]"),
- ("range(-3, -5, -1)", "[-3, -4]"),
- ("range(2, 19, -5)", "[]"),
- ("range(2, 19, 5)", "[2, 7, 12, 17]"),
- ("range(3)", "[0, 1, 2]"),
- ("range(3, 5)", "[3, 4]"),
- ("range(-len(@))", "[]"),
- ("range(0, -len(@))", "[]"),
- ("range(0, len(@) - len(@))", "[]"),
- ("range(0, -len(@) + len(@))", "[]"),
+ new Query_DesiredResult("range(-10)", "[]"),
+ new Query_DesiredResult("range(-3, -5, -1)", "[-3, -4]"),
+ new Query_DesiredResult("range(2, 19, -5)", "[]"),
+ new Query_DesiredResult("range(2, 19, 5)", "[2, 7, 12, 17]"),
+ new Query_DesiredResult("range(3)", "[0, 1, 2]"),
+ new Query_DesiredResult("range(3, 5)", "[3, 4]"),
+ new Query_DesiredResult("range(-len(@))", "[]"),
+ new Query_DesiredResult("range(0, -len(@))", "[]"),
+ new Query_DesiredResult("range(0, len(@) - len(@))", "[]"),
+ new Query_DesiredResult("range(0, -len(@) + len(@))", "[]"),
// uminus'd CurJson appears to be causing problems with other arithmetic binops as the second arg to the range function
- ("range(0, -len(@) - len(@))", "[]"),
- ("range(0, -len(@) * len(@))", "[]"),
- ("range(0, 5, -len(@))", "[]"),
- ("-len(@) + len(@)", "0"), // see if binops of uminus'd CurJson are also causing problems when they're not the second arg to the range function
- ("-len(@) * len(@)", (-(((JObject)foo).Length * ((JObject)foo).Length)).ToString()),
- ("abs(-len(@) + len(@))", "0"), // see if other functions (not just range) of binops of uminus'd CurJson cause problems
- ("range(0, abs(-len(@) + len(@)))", "[]"),
- ("range(0, -abs(-len(@) + len(@)))", "[]"),
+ new Query_DesiredResult("range(0, -len(@) - len(@))", "[]"),
+ new Query_DesiredResult("range(0, -len(@) * len(@))", "[]"),
+ new Query_DesiredResult("range(0, 5, -len(@))", "[]"),
+ new Query_DesiredResult("-len(@) + len(@)", "0"), // see if binops of uminus'd CurJson are also causing problems when they're not the second arg to the range function
+ new Query_DesiredResult("-len(@) * len(@)", (-(((JObject)foo).Length * ((JObject)foo).Length)).ToString()),
+ new Query_DesiredResult("abs(-len(@) + len(@))", "0"), // see if other functions (not just range) of binops of uminus'd CurJson cause problems
+ new Query_DesiredResult("range(0, abs(-len(@) + len(@)))", "[]"),
+ new Query_DesiredResult("range(0, -abs(-len(@) + len(@)))", "[]"),
// parens tests
- ("(@.foo[:2])", "[[0, 1, 2], [3.0, 4.0, 5.0]]"),
- ("(@.foo)[0]", "[0, 1, 2]"),
+ new Query_DesiredResult("(@.foo[:2])", "[[0, 1, 2], [3.0, 4.0, 5.0]]"),
+ new Query_DesiredResult("(@.foo)[0]", "[0, 1, 2]"),
// projection tests
- ("@{@.jub, @.quz}", "[[], {}]"),
- ("@.foo{foo: @[0], bar: @[1][:2]}", "{\"foo\": [0, 1, 2], \"bar\": [3.0, 4.0]}"),
- ("sorted(flatten(@.guzo, 2)){`min`: @[0], `max`: @[-1], `tot`: sum(@)}", "{\"min\": 1, \"max\": 3, \"tot\": 6}"),
- ("(@.foo[:]{`max`: max(@), `min`: min(@)})[0]", "{\"max\": 2.0, \"min\": 0.0}"),
- ("len(@.foo[:]{blah: 1})", "3"),
- ("str(@.foo[0]{a: @[0], b: @[1]})", "{\"a\": \"0\", \"b\": \"1\"}"),
- ("max_by(@.foo[:]{mx: max(@), first: @[0]}, mx)", "{\"mx\": 8.0, \"first\": 6.0}"),
+ new Query_DesiredResult("@{@.jub, @.quz}", "[[], {}]"),
+ new Query_DesiredResult("@.foo{foo: @[0], bar: @[1][:2]}", "{\"foo\": [0, 1, 2], \"bar\": [3.0, 4.0]}"),
+ new Query_DesiredResult("sorted(flatten(@.guzo, 2)){`min`: @[0], `max`: @[-1], `tot`: sum(@)}", "{\"min\": 1, \"max\": 3, \"tot\": 6}"),
+ new Query_DesiredResult("(@.foo[:]{`max`: max(@), `min`: min(@)})[0]", "{\"max\": 2.0, \"min\": 0.0}"),
+ new Query_DesiredResult("len(@.foo[:]{blah: 1})", "3"),
+ new Query_DesiredResult("str(@.foo[0]{a: @[0], b: @[1]})", "{\"a\": \"0\", \"b\": \"1\"}"),
+ new Query_DesiredResult("max_by(@.foo[:]{mx: max(@), first: @[0]}, mx)", "{\"mx\": 8.0, \"first\": 6.0}"),
// recursive search
- ("@..g`\\\\d`", "[[{\"foo\": 2}, 1], 0]"),
- ("@..[foo,`0`]", "[[[0, 1, 2], [3.0, 4.0, 5.0], [6.0, 7.0, 8.0]], 2, 0]"),
- ("@..`7`[0].foo", "[2]"),
- ("@._..`0`", "[0]"),
- ("@.bar..[a, b]", "[false, [\"a`g\", \"bah\"]]"),
- ("@.bar..c", "{}"),
- ("@.bar..[a, c]", "[false]"),
- ("@.`7`..foo", "[2]"),
+ new Query_DesiredResult("@..g`\\\\d`", "[[{\"foo\": 2}, 1], 0]"),
+ new Query_DesiredResult("@..[foo,`0`]", "[[[0, 1, 2], [3.0, 4.0, 5.0], [6.0, 7.0, 8.0]], 2, 0]"),
+ new Query_DesiredResult("@..`7`[0].foo", "[2]"),
+ new Query_DesiredResult("@._..`0`", "[0]"),
+ new Query_DesiredResult("@.bar..[a, b]", "[false, [\"a`g\", \"bah\"]]"),
+ new Query_DesiredResult("@.bar..c", "{}"),
+ new Query_DesiredResult("@.bar..[a, c]", "[false]"),
+ new Query_DesiredResult("@.`7`..foo", "[2]"),
};
int ii = 0;
int tests_failed = 0;
JNode result;
- foreach ((string query, string desired_result_str) in testcases)
+ foreach (Query_DesiredResult qd in testcases)
{
ii++;
- JNode desired_result = jsonParser.Parse(desired_result_str);
+ JNode jdesired_result = jsonParser.Parse(qd.desired_result);
try
{
- result = remesparser.Search(query, foo);
+ result = remesparser.Search(qd.query, foo);
}
catch (Exception ex)
{
tests_failed++;
- Console.WriteLine($"Expected remesparser.Search({query}, foo) to return {desired_result.ToString()}, but instead threw" +
+ Console.WriteLine($"Expected remesparser.Search({qd.query}, foo) to return {jdesired_result.ToString()}, but instead threw" +
$" an exception:\n{ex}");
continue;
}
- if (result.type != desired_result.type || !result.Equals(desired_result))
+ if (result.type != jdesired_result.type || !result.Equals(jdesired_result))
{
tests_failed++;
- Console.WriteLine($"Expected remesparser.Search({query}, foo) to return {desired_result.ToString()}, " +
+ Console.WriteLine($"Expected remesparser.Search({qd.query}, foo) to return {jdesired_result.ToString()}, " +
$"but instead got {result.ToString()}.");
}
}
@@ -2232,14 +2314,31 @@ public static void Test()
public class RemesPathBenchmarker
{
- public static (JNode json, int len, long[] times) LoadJsonAndTime(string fname, int num_trials = 8)
+ ///
+ /// Repeatedly parse the JSON of a large file (big_random.json, about 1MB, containing nested arrays, dicts,
+ /// with ints, floats and strings as scalars)
+ /// Also repeatedly run a Remespath query on the JSON.
+ /// MOST RECENT RESULTS:
+ /// To convert JSON string of size 975068 into JNode took 185.589 +/- 53.713 ms over 14 trials
+ /// Load times(ms): 214,175,222,181,267,248,229,171,175,248,139,121,114,87
+ /// Compiling query "@[@[:].z =~ `(?i)[a-z]{5}`]" took 0.056 ms(one-time cost b/c caching)
+ /// To run query "@[@[:].z =~ `(?i)[a-z]{5}`]" on JNode from JSON of size 975068 into took 1.854 +/- 3.915 ms over 14 trials
+ /// Query times(ms) : 1.718,1.709,1.024,0.92,0.836,0.756,15.882,0.666,0.438,0.385,0.386,0.364,0.41,0.454
+ /// For reference, the Python standard library JSON parser is about 10x FASTER than JsonParser.Parse,
+ /// and my Python remespath implementation is 10-30x SLOWER than this remespath implementation.
+ ///
+ ///
+ ///
+ public static void Benchmark(string query, string fname, int num_trials = 8)
{
+ // setup
JsonParser jsonParser = new JsonParser();
Stopwatch watch = new Stopwatch();
string jsonstr = File.ReadAllText(fname);
int len = jsonstr.Length;
- long[] times = new long[num_trials];
+ long[] load_times = new long[num_trials];
JNode json = new JNode(null, Dtype.NULL, 0);
+ // benchmark time to load json
for (int ii = 0; ii < num_trials; ii++)
{
watch.Reset();
@@ -2247,15 +2346,22 @@ public static (JNode json, int len, long[] times) LoadJsonAndTime(string fname,
json = jsonParser.Parse(jsonstr);
watch.Stop();
long t = watch.Elapsed.Ticks;
- times[ii] = t;
+ load_times[ii] = t;
}
- return (json, len, times);
- }
-
- public static (JNode json, long[] times, long compile_time) TimeRemesPathQuery(JNode json, string query, int num_trials = 8)
- {
- Stopwatch watch = new Stopwatch();
- long[] times = new long[num_trials];
+ // display loading results
+ string json_preview = json.ToString().Slice(":300") + "\n...";
+ Console.WriteLine($"Preview of json: {json_preview}");
+ double[] mu_sd = GetMeanAndSd(load_times);
+ Console.WriteLine($"To convert JSON string of size {len} into JNode took {ConvertTicks(mu_sd[0])} +/- {ConvertTicks(mu_sd[1])} " +
+ $"ms over {load_times.Length} trials");
+ var load_times_str = new string[load_times.Length];
+ for (int ii = 0; ii < load_times.Length; ii++)
+ {
+ load_times_str[ii] = (load_times[ii] / 10000).ToString();
+ }
+ Console.WriteLine($"Load times (ms): {String.Join(", ", load_times_str)}");
+ // time remespath query
+ long[] query_times = new long[num_trials];
RemesParser parser = new RemesParser();
JNode result = new JNode(null, Dtype.NULL, 0);
watch.Start();
@@ -2269,12 +2375,25 @@ public static (JNode json, long[] times, long compile_time) TimeRemesPathQuery(J
result = query_func(json);
watch.Stop();
long t = watch.Elapsed.Ticks;
- times[ii] = t;
+ query_times[ii] = t;
+ }
+ // display querying results
+ mu_sd = GetMeanAndSd(query_times);
+ double mu = mu_sd[0];
+ double sd = mu_sd[1];
+ Console.WriteLine($"Compiling query \"{query}\" took {ConvertTicks(compile_time)} ms (one-time cost b/c caching)");
+ Console.WriteLine($"To run query \"{query}\" on JNode from JSON of size {len} into took {ConvertTicks(mu)} +/- {ConvertTicks(sd)} ms over {load_times.Length} trials");
+ var query_times_str = new string[query_times.Length];
+ for (int ii = 0; ii < query_times.Length; ii++)
+ {
+ query_times_str[ii] = Math.Round(query_times[ii] / 1e4, 3).ToString();
}
- return (result, times, compile_time);
+ Console.WriteLine($"Query times (ms): {String.Join(", ", query_times_str)}");
+ string result_preview = result.ToString().Slice(":300") + "\n...";
+ Console.WriteLine($"Preview of result: {result_preview}");
}
- public static (double mu, double sd) GetMeanAndSd(long[] times)
+ public static double[] GetMeanAndSd(long[] times)
{
double mu = 0;
foreach (int t in times) { mu += t; }
@@ -2286,7 +2405,7 @@ public static (double mu, double sd) GetMeanAndSd(long[] times)
sd += diff * diff;
}
sd = Math.Sqrt(sd / times.Length);
- return (mu, sd);
+ return new double[] { mu, sd };
}
public static double ConvertTicks(double ticks, string new_unit = "ms", int sigfigs = 3)
@@ -2300,48 +2419,6 @@ public static double ConvertTicks(double ticks, string new_unit = "ms", int sigf
default: throw new ArgumentException("Time unit must be s, mus, ms, or ns");
}
}
-
- ///
- /// Repeatedly parse the JSON of a large file (big_random.json, about 1MB, containing nested arrays, dicts,
- /// with ints, floats and strings as scalars)
- /// Also repeatedly run a Remespath query on the JSON.
- /// MOST RECENT RESULTS:
- /// To convert JSON string of size 975068 into JNode took 185.589 +/- 53.713 ms over 14 trials
- /// Load times(ms): 214,175,222,181,267,248,229,171,175,248,139,121,114,87
- /// Compiling query "@[@[:].z =~ `(?i)[a-z]{5}`]" took 0.056 ms(one-time cost b/c caching)
- /// To run query "@[@[:].z =~ `(?i)[a-z]{5}`]" on JNode from JSON of size 975068 into took 1.854 +/- 3.915 ms over 14 trials
- /// Query times(ms) : 1.718,1.709,1.024,0.92,0.836,0.756,15.882,0.666,0.438,0.385,0.386,0.364,0.41,0.454
- /// For reference, the Python standard library JSON parser is about 10x FASTER than JsonParser.Parse,
- /// and my Python remespath implementation is 10-30x SLOWER than this remespath implementation.
- ///
- ///
- ///
- public static void BenchmarkBigFile(string query, int num_trials = 8)
- {
- (JNode json, int len, long[] load_times) = LoadJsonAndTime(@"C:\Users\mjols\Documents\csharp\JSON_Viewer_cmd\testfiles\big_random.json", num_trials);
- string json_preview = json.ToString().Slice(":300") + "\n...";
- Console.WriteLine($"Preview of json: {json_preview}");
- (double mu_load, double sd_load) = GetMeanAndSd(load_times);
- Console.WriteLine($"To convert JSON string of size {len} into JNode took {ConvertTicks(mu_load)} +/- {ConvertTicks(sd_load)} ms over {load_times.Length} trials");
- var load_times_str = new string[load_times.Length];
- for (int ii = 0; ii < load_times.Length; ii++)
- {
- load_times_str[ii] = (load_times[ii] / 10000).ToString();
- }
- Console.WriteLine($"Load times (ms): {String.Join(',', load_times_str)}");
- (JNode result, long[] times, long compile_time) = TimeRemesPathQuery(json, query, num_trials);
- (double mu, double sd) = GetMeanAndSd(times);
- Console.WriteLine($"Compiling query \"{query}\" took {ConvertTicks(compile_time)} ms (one-time cost b/c caching)");
- Console.WriteLine($"To run query \"{query}\" on JNode from JSON of size {len} into took {ConvertTicks(mu)} +/- {ConvertTicks(sd)} ms over {load_times.Length} trials");
- var query_times_str = new string[times.Length];
- for (int ii = 0; ii < times.Length; ii++)
- {
- query_times_str[ii] = Math.Round(times[ii] / 1e4, 3).ToString();
- }
- Console.WriteLine($"Query times (ms): {String.Join(',', query_times_str)}");
- string result_preview = result.ToString().Slice(":300") + "\n...";
- Console.WriteLine($"Preview of result: {result_preview}");
- }
}
#endregion
}
diff --git a/JsonToolsNppPlugin/JSONTools/RemesPathFunctions.cs b/JsonToolsNppPlugin/JSONTools/RemesPathFunctions.cs
index def3217..6804ca1 100644
--- a/JsonToolsNppPlugin/JSONTools/RemesPathFunctions.cs
+++ b/JsonToolsNppPlugin/JSONTools/RemesPathFunctions.cs
@@ -6,8 +6,9 @@
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
+using JSON_Tools.Utils;
-namespace JSON_Viewer.JSONViewer
+namespace JSON_Tools.JSON_Tools
{
///
/// Binary operators, e.g., +, -, *, ==
@@ -61,7 +62,7 @@ JNode ResolvedBinop(JNode json)
public static JNode Add(JNode a, JNode b)
{
- object? aval = a.value; object? bval = b.value;
+ object aval = a.value; object bval = b.value;
Dtype atype = a.type; Dtype btype = b.type;
if (atype == Dtype.INT && btype == Dtype.INT)
{
@@ -80,7 +81,7 @@ public static JNode Add(JNode a, JNode b)
public static JNode Sub(JNode a, JNode b)
{
- object? aval = a.value; object? bval = b.value;
+ object aval = a.value; object bval = b.value;
Dtype atype = a.type; Dtype btype = b.type;
if (atype == Dtype.INT && btype == Dtype.INT)
{
@@ -91,7 +92,7 @@ public static JNode Sub(JNode a, JNode b)
public static JNode Mul(JNode a, JNode b)
{
- object? aval = a.value; object? bval = b.value;
+ object aval = a.value; object bval = b.value;
Dtype atype = a.type; Dtype btype = b.type;
if (atype == Dtype.INT && btype == Dtype.INT)
{
@@ -128,7 +129,7 @@ public static JNode NegPow(JNode a, JNode b)
public static JNode Mod(JNode a, JNode b)
{
- object? aval = a.value; object? bval = b.value;
+ object aval = a.value; object bval = b.value;
Dtype atype = a.type; Dtype btype = b.type;
if (atype == Dtype.INT && btype == Dtype.INT)
{
@@ -221,7 +222,7 @@ public static JNode IsNotEqual(JNode a, JNode b)
///
public static void MinusEquals(JNode a, JNode b)
{
- object? aval = a.value; object? bval = b.value;
+ object aval = a.value; object bval = b.value;
Dtype atype = a.type; Dtype btype = b.type;
if (btype == Dtype.FLOAT && atype != Dtype.FLOAT)
{
@@ -245,7 +246,7 @@ public static void MinusEquals(JNode a, JNode b)
///
public static void PlusEquals(JNode a, JNode b)
{
- object? aval = a.value; object? bval = b.value;
+ object aval = a.value; object bval = b.value;
Dtype atype = a.type; Dtype btype = b.type;
if (btype == Dtype.FLOAT && atype != Dtype.FLOAT)
{
@@ -269,7 +270,7 @@ public static void PlusEquals(JNode a, JNode b)
///
public static void TimesEquals(JNode a, JNode b)
{
- object? aval = a.value; object? bval = b.value;
+ object aval = a.value; object bval = b.value;
Dtype atype = a.type; Dtype btype = b.type;
if (btype == Dtype.FLOAT && atype != Dtype.FLOAT)
{
@@ -294,7 +295,7 @@ public static void TimesEquals(JNode a, JNode b)
///
public static void PowEquals(JNode a, JNode b)
{
- object? aval = a.value; object? bval = b.value;
+ object aval = a.value; object bval = b.value;
Dtype atype = a.type; Dtype btype = b.type;
a.value = Math.Pow(Convert.ToDouble(aval), Convert.ToDouble(bval));
a.type = Dtype.FLOAT;
@@ -503,12 +504,12 @@ public static JNode Mean(JNode[] args)
/// "Flattens" nested lists by adding all the elements of lists at depth 1 to a single
/// list.
/// Example: Flatten({{1,2},3,{4,{5}}}) = {1,2,3,4,{5}}
- /// (except input is an array with one List and output is a List)
+ /// (except input is an array with one List and output is a List)
/// In the above example, everything at depth 0 is still at depth 0, and everything else
/// has its depth reduced by 1.
///
- /// an array containng a single List
- /// List containing the flattened result
+ /// an array containng a single List
+ /// List containing the flattened result
public static JNode Flatten(JNode[] args)
{
var itbl = (JArray)args[0];
@@ -543,7 +544,7 @@ public static JNode Flatten(JNode[] args)
}
///
- /// first arg should be List<object?>, second arg should be object?,
+ /// first arg should be List<object>, second arg should be object,
/// optional third arg should be bool.
/// If second arg (elt) is in first arg (itbl), return the index in itbl where
/// elt first occurs.
@@ -584,7 +585,8 @@ public static JNode Index(JNode[] args)
public static JNode Max(JNode[] args)
{
var itbl = (JArray)args[0];
- JNode biggest = new JNode(double.NegativeInfinity, Dtype.FLOAT, 0);
+ double neginf = 1d; // double.NegativeInfinity
+ JNode biggest = new JNode(neginf, Dtype.FLOAT, 0);
foreach (JNode child in itbl.children)
{
if (Convert.ToDouble(child.value) > Convert.ToDouble(biggest.value)) { biggest = child; }
@@ -601,7 +603,8 @@ public static JNode Max(JNode[] args)
public static JNode Min(JNode[] args)
{
var itbl = (JArray)args[0];
- JNode smallest = new JNode(double.PositiveInfinity, Dtype.FLOAT, 0);
+ double inf = 1d; // double.PositiveInfinity
+ JNode smallest = new JNode(inf, Dtype.FLOAT, 0);
foreach (JNode child in itbl.children)
{
if (Convert.ToDouble(child.value) < Convert.ToDouble(smallest.value)) { smallest = child; }
@@ -815,8 +818,10 @@ public static JNode Keys(JNode[] args)
public static JNode Items(JNode[] args)
{
var its = new List();
- foreach ((string k, JNode v) in ((JObject)args[0]).children)
+ JObject obj = (JObject)args[0];
+ foreach (string k in obj.children.Keys)
{
+ JNode v = obj.children[k];
JNode knode = new JNode(k, Dtype.STR, 0);
var subarr = new List();
subarr.Add(knode);
@@ -830,13 +835,13 @@ public static JNode Unique(JNode[] args)
{
var itbl = (JArray)args[0];
var is_sorted = args[1].value;
- var uniq = new HashSet();
+ var uniq = new HashSet();
foreach (JNode val in itbl.children)
{
uniq.Add(val.value);
}
var uniq_list = new List();
- foreach (object? val in uniq)
+ foreach (object val in uniq)
{
uniq_list.Add(ObjectsToJNode(val));
}
@@ -908,17 +913,19 @@ public static JNode ValueCounts(JNode[] args)
var uniqs = new Dictionary();
foreach (JNode elt in itbl.children)
{
- object? val = elt.value;
+ object val = elt.value;
if (val == null)
{
throw new RemesPathException("Can't count occurrences of objects with null values");
}
- uniqs.TryAdd(val, 0);
+ if (!uniqs.ContainsKey(val))
+ uniqs[val] = 0;
uniqs[val]++;
}
var uniq_arr = new JArray(0, new List());
- foreach ((object elt, long ct) in uniqs)
+ foreach (object elt in uniqs.Keys)
{
+ long ct = uniqs[elt];
JArray elt_ct = new JArray(0, new List());
elt_ct.children.Add(ObjectsToJNode(elt));
elt_ct.children.Add(new JNode(ct, Dtype.INT, 0));
@@ -956,7 +963,7 @@ public static JNode StringJoin(JNode[] args)
public static JNode GroupBy(JNode[] args)
{
var itbl = (JArray)args[0];
- object? key = args[1].value;
+ object key = args[1].value;
if (!(key is string || key is long))
{
throw new ArgumentException("The GroupBy function can only group by string keys or int indices");
@@ -971,10 +978,14 @@ public static JNode GroupBy(JNode[] args)
JArray subobj = (JArray)node;
JNode val = subobj.children[ikey];
vstr = val.type == Dtype.STR ? (string)val.value : val.ToString();
- if (!gb.TryAdd(vstr, new JArray(0, new List { subobj })))
+ if (gb.ContainsKey(vstr))
{
((JArray)gb[vstr]).children.Add(subobj);
}
+ else
+ {
+ gb[vstr] = new JArray(0, new List { subobj });
+ }
}
}
else
@@ -985,10 +996,14 @@ public static JNode GroupBy(JNode[] args)
JObject subobj = (JObject)node;
JNode val = subobj.children[skey];
vstr = val.type == Dtype.STR ? (string)val.value : val.ToString();
- if (!gb.TryAdd(vstr, new JArray(0, new List { subobj })))
+ if (gb.ContainsKey(vstr))
{
((JArray)gb[vstr]).children.Add(subobj);
}
+ else
+ {
+ gb[vstr] = new JArray(0, new List { subobj });
+ }
}
}
return new JObject(0, gb);
@@ -1143,7 +1158,7 @@ public static JNode StrCount(JNode[] args)
}
///
- /// Get a List containing all non-overlapping occurrences of regex pattern pat in
+ /// Get a List containing all non-overlapping occurrences of regex pattern pat in
/// string node
///
/// string
@@ -1266,7 +1281,7 @@ public static JNode IfElse(JNode[] args)
public static JNode Log(JNode[] args)
{
double num = Convert.ToDouble(args[0].value);
- object? Base = args[1].value;
+ object Base = args[1].value;
if (Base == null)
{
return new JNode(Math.Log(num), Dtype.FLOAT, 0);
@@ -1276,7 +1291,7 @@ public static JNode Log(JNode[] args)
public static JNode Log2(JNode[] args)
{
- return new JNode(Math.Log2(Convert.ToDouble(args[0].value)), Dtype.FLOAT, 0);
+ return new JNode(Math.Log(Convert.ToDouble(args[0].value), 2), Dtype.FLOAT, 0);
}
public static JNode Abs(JNode[] args)
@@ -1375,7 +1390,7 @@ public static JNode Uminus(JNode[] args)
#endregion
- public static JNode ObjectsToJNode(object? obj)
+ public static JNode ObjectsToJNode(object obj)
{
if (obj == null)
{
@@ -1397,25 +1412,26 @@ public static JNode ObjectsToJNode(object? obj)
{
return new JNode((bool)obj, Dtype.BOOL, 0);
}
- if (obj is List)
+ if (obj is List)
{
var nodes = new List();
- foreach (object? child in (List)obj)
+ foreach (object child in (List)obj)
{
nodes.Add(ObjectsToJNode(child));
}
return new JArray(0, nodes);
}
- else if (obj is Dictionary)
+ else if (obj is Dictionary)
{
var nodes = new Dictionary();
- foreach ((string key, object? val) in (Dictionary)obj)
+ var dobj = (Dictionary)obj;
+ foreach (string key in dobj.Keys)
{
- nodes[key] = ObjectsToJNode(val);
+ nodes[key] = ObjectsToJNode(dobj[key]);
}
return new JObject(0, nodes);
}
- throw new ArgumentException("Cannot convert any objects to JNode except null, long, double, bool, string, List, or Dictionary, or Dictionary
@@ -1424,7 +1440,7 @@ public static JNode ObjectsToJNode(object? obj)
///
///
///
- public static object? JNodeToObjects(JNode node)
+ public static object JNodeToObjects(JNode node)
{
// if it's not an obj, arr, or unknown, just return its value
if ((node.type & Dtype.ITERABLE) == 0)
@@ -1437,14 +1453,15 @@ public static JNode ObjectsToJNode(object? obj)
}
if (node.type == Dtype.OBJ)
{
- var dic = new Dictionary();
- foreach ((string key, JNode val) in ((JObject)node).children)
+ var dic = new Dictionary();
+ JObject onode = (JObject)node;
+ foreach (string key in onode.children.Keys)
{
- dic[key] = JNodeToObjects(val);
+ dic[key] = JNodeToObjects(onode.children[key]);
}
return dic;
}
- var arr = new List();
+ var arr = new List();
foreach (JNode val in ((JArray)node).children)
{
arr.Add(JNodeToObjects(val));
@@ -1569,19 +1586,22 @@ public static void Test()
{
JsonParser jsonParser = new JsonParser();
JNode jtrue = jsonParser.Parse("true"); JNode jfalse = jsonParser.Parse("false");
- var testcases = new (JNode x, JNode y, Binop bop, JNode desired, string msg)[]
- {
- (jsonParser.Parse("1"), jsonParser.Parse("3"), Binop.BINOPS["-"], jsonParser.Parse("-2"), "subtraction of ints"),
- (jsonParser.Parse("2.5"), jsonParser.Parse("5"), Binop.BINOPS["/"], jsonParser.Parse("0.5"), "division of float by int"),
- (jsonParser.Parse("\"a\""), jsonParser.Parse("\"b\""), Binop.BINOPS["+"], jsonParser.Parse("\"ab\""), "addition of strings"),
- (jsonParser.Parse("3"), jsonParser.Parse("4"), Binop.BINOPS[">="], jfalse, "comparison ge"),
- (jsonParser.Parse("7"), jsonParser.Parse("9"), Binop.BINOPS["<"], jtrue, "comparison lt"),
- (jsonParser.Parse("\"abc\""), jsonParser.Parse("\"ab+\""), Binop.BINOPS["=~"], jtrue, "has_pattern"),
+ var testcases = new object[][]
+ {
+ new object[]{ jsonParser.Parse("1"), jsonParser.Parse("3"), Binop.BINOPS["-"], jsonParser.Parse("-2"), "subtraction of ints" },
+ new object[]{ jsonParser.Parse("2.5"), jsonParser.Parse("5"), Binop.BINOPS["/"], jsonParser.Parse("0.5"), "division of float by int" },
+ new object[]{ jsonParser.Parse("\"a\""), jsonParser.Parse("\"b\""), Binop.BINOPS["+"], jsonParser.Parse("\"ab\""), "addition of strings" },
+ new object[]{ jsonParser.Parse("3"), jsonParser.Parse("4"), Binop.BINOPS[">="], jfalse, "comparison ge" },
+ new object[]{ jsonParser.Parse("7"), jsonParser.Parse("9"), Binop.BINOPS["<"], jtrue, "comparison lt" },
+ new object[]{ jsonParser.Parse("\"abc\""), jsonParser.Parse("\"ab+\""), Binop.BINOPS["=~"], jtrue, "has_pattern" },
};
int tests_failed = 0;
int ii = 0;
- foreach ((JNode x, JNode y, Binop bop, JNode desired, string msg) in testcases)
+ foreach (object[] test in testcases)
{
+ JNode x = (JNode)test[0], y = (JNode)test[1], desired = (JNode)test[3];
+ Binop bop = (Binop)test[2];
+ string msg = (string)test[4];
JNode output = bop.Call(x, y);
string str_desired = desired.ToString();
string str_output = output.ToString();
@@ -1608,12 +1628,12 @@ public static void Test()
JsonParser jsonParser = new JsonParser();
JNode jtrue = jsonParser.Parse("true");
JNode jfalse = jsonParser.Parse("false");
- var testcases = new (JNode[] args, ArgFunction f, JNode desired)[]
+ var testcases = new object[][]
{
- (new JNode[]{jsonParser.Parse("[1,2]")}, ArgFunction.FUNCTIONS["len"], new JNode(Convert.ToInt64(2), Dtype.INT, 0)),
- (new JNode[]{jsonParser.Parse("[1,2]"), jtrue}, ArgFunction.FUNCTIONS["sorted"], jsonParser.Parse("[2,1]")),
- (new JNode[]{jsonParser.Parse("[[1,2], [4, 1]]"), new JNode(Convert.ToInt64(1), Dtype.INT, 0), jfalse }, ArgFunction.FUNCTIONS["sort_by"], jsonParser.Parse("[[4, 1], [1, 2]]")),
- (new JNode[]{jsonParser.Parse("[1, 3, 2]")}, ArgFunction.FUNCTIONS["mean"], new JNode(2.0, Dtype.FLOAT, 0)),
+ new object[]{ new JNode[]{jsonParser.Parse("[1,2]")}, ArgFunction.FUNCTIONS["len"], new JNode(Convert.ToInt64(2), Dtype.INT, 0) },
+ new object[]{ new JNode[]{jsonParser.Parse("[1,2]"), jtrue}, ArgFunction.FUNCTIONS["sorted"], jsonParser.Parse("[2,1]") },
+ new object[]{ new JNode[]{jsonParser.Parse("[[1,2], [4, 1]]"), new JNode(Convert.ToInt64(1), Dtype.INT, 0), jfalse }, ArgFunction.FUNCTIONS["sort_by"], jsonParser.Parse("[[4, 1], [1, 2]]") },
+ new object[]{ new JNode[]{jsonParser.Parse("[1, 3, 2]")}, ArgFunction.FUNCTIONS["mean"], new JNode(2.0, Dtype.FLOAT, 0) },
//(new JNode[]{jsonParser.Parse("[{\"a\": 1, \"b\": 2}, {\"a\": 3, \"b\": 1}]"), new JNode("b", Dtype.STR, 0)}, ArgFunction.FUNCTIONS["min_by"], jsonParser.Parse("{\"a\": 3, \"b\": 1}")),
//(new JNode[]{jsonParser.Parse("[\"ab\", \"bca\", \"\"]")}, ArgFunction.FUNCTIONS["s_len"], jsonParser.Parse("[2, 3, 0]")),
//(new JNode[]{jsonParser.Parse("[\"ab\", \"bca\", \"\"]"), new JNode("a", Dtype.STR, 0), new JNode("z", Dtype.STR, 0)}, ArgFunction.FUNCTIONS["s_sub"], jsonParser.Parse("[\"zb\", \"bcz\", \"\"]")),
@@ -1624,8 +1644,11 @@ public static void Test()
};
int tests_failed = 0;
int ii = 0;
- foreach ((JNode[] args, ArgFunction f, JNode desired) in testcases)
+ foreach (object[] test in testcases)
{
+ JNode[] args = (JNode[])test[0];
+ ArgFunction f = (ArgFunction)test[1];
+ JNode desired = (JNode)test[2];
JNode output = f.Call(args);
var sb = new StringBuilder();
sb.Append('{');
diff --git a/JsonToolsNppPlugin/JSONTools/RemesPathLexer.cs b/JsonToolsNppPlugin/JSONTools/RemesPathLexer.cs
index b556d54..3f90047 100644
--- a/JsonToolsNppPlugin/JSONTools/RemesPathLexer.cs
+++ b/JsonToolsNppPlugin/JSONTools/RemesPathLexer.cs
@@ -6,7 +6,7 @@ Breaks a Remespath query into tokens.
using System.Text.RegularExpressions;
using System.Text;
-namespace JSON_Viewer.JSONViewer
+namespace JSON_Tools.JSON_Tools
{
public class RemesLexerException : Exception
{
@@ -51,16 +51,12 @@ public override string ToString()
public class RemesPathLexer
{
- //public static readonly Regex MASTER_REGEX = new Regex(
- //@"(&|\||\^|\+|-|/{1,2}|\*{1,2}|%|" + // most arithmetic and bitwise operators
- //@"[=!]=|[><]=?|=~|" + // comparison operators
- //@"[gj]?`(?:[^`]|(?<=\\)`)*(?
+ /// position in query string
+ ///
+ public int ii = 0;
+
+ public RemesPathLexer() { }
// note that '-' sign is not part of this num regex. That's because '-' is its own token and is handled
// separately from numbers
@@ -69,36 +65,29 @@ public class RemesPathLexer
@"([eE][-+]?\d+)?$", // optional scientific notation
RegexOptions.Compiled);
- //public static readonly HashSet DELIMITERS = new HashSet
- // {",", "[", "]", "(", ")", "{", "}", ".", ":", ".."};
-
- public static readonly string DELIMITERS = ",[](){}.:";
+ public static readonly HashSet DELIMITERS = new HashSet { ',', '[', ']', '(', ')', '{', '}', '.', ':' };
- public static readonly string BINOP_START_CHARS = "!%&*+-/<=>^|";
+ public static readonly HashSet BINOP_START_CHARS = new HashSet {'!', '%', '&', '*', '+', '-', '/', '<', '=', '>', '^', '|', };
- public static readonly string WHITESPACE = " \t\r\n";
+ public static readonly HashSet WHITESPACE = new HashSet { ' ', '\t', '\r', '\n' };
- public static readonly Dictionary CONSTANTS = new Dictionary
+ public static readonly Dictionary CONSTANTS = new Dictionary
{
["null"] = null,
- ["NaN"] = double.NaN,
- ["Infinity"] = double.PositiveInfinity,
+ //["NaN"] = double.NaN,
+ //["Infinity"] = double.PositiveInfinity,
["true"] = true,
["false"] = false
};
- public RemesPathLexer()
- {
- }
-
- public static (JNode num, int ii) ParseNumber(string q, int ii)
+ public JNode ParseNumber(string q)
{
StringBuilder sb = new StringBuilder();
// parsed tracks which portions of a number have been parsed.
- // So if the int part has been parsed, it will be "i".
- // If the int and decimal point parts have been parsed, it will be "id".
- // If the int, decimal point, and scientific notation parts have been parsed, it will be "ide"
- string parsed = "i";
+ // So if the int part has been parsed, it will be 1.
+ // If the int and decimal point parts have been parsed, it will be 3.
+ // If the int, decimal point, and scientific notation parts have been parsed, it will be 7
+ int parsed = 1;
char c;
while (ii < q.Length)
{
@@ -110,21 +99,21 @@ public static (JNode num, int ii) ParseNumber(string q, int ii)
}
else if (c == '.')
{
- if (parsed != "i")
+ if (parsed != 1)
{
throw new RemesLexerException(ii, q, "Number with two decimal points");
}
- parsed = "id";
+ parsed = 3;
sb.Append('.');
ii++;
}
else if (c == 'e' || c == 'E')
{
- if (parsed.Contains('e'))
+ if ((parsed & 4) != 0)
{
break;
}
- parsed += 'e';
+ parsed += 4;
sb.Append('e');
if (ii < q.Length - 1)
{
@@ -141,14 +130,14 @@ public static (JNode num, int ii) ParseNumber(string q, int ii)
break;
}
}
- if (parsed == "i")
+ if (parsed == 1)
{
- return (new JNode(long.Parse(sb.ToString()), Dtype.INT, 0), ii);
+ return new JNode(long.Parse(sb.ToString()), Dtype.INT, 0);
}
- return (new JNode(double.Parse(sb.ToString()), Dtype.FLOAT, 0), ii);
+ return new JNode(double.Parse(sb.ToString()), Dtype.FLOAT, 0);
}
- public static (JNode s, int ii) ParseQuotedString(string q, int ii)
+ public JNode ParseQuotedString(string q)
{
bool escaped = false;
char c;
@@ -159,7 +148,7 @@ public static (JNode s, int ii) ParseQuotedString(string q, int ii)
if (c == '`')
{
if (!escaped) {
- return (new JNode(sb.ToString(), Dtype.STR, 0), ii);
+ return new JNode(sb.ToString(), Dtype.STR, 0);
}
sb.Append(c);
escaped = false;
@@ -186,7 +175,7 @@ public static (JNode s, int ii) ParseQuotedString(string q, int ii)
///
///
///
- public static (object s, int ii) ParseUnquotedString(string q, int ii)
+ public object ParseUnquotedString(string q)
{
char c = q[ii++];
StringBuilder sb = new StringBuilder();
@@ -207,35 +196,35 @@ public static (object s, int ii) ParseUnquotedString(string q, int ii)
string uqs = sb.ToString();
if (CONSTANTS.ContainsKey(uqs))
{
- object? con = CONSTANTS[uqs];
+ object con = CONSTANTS[uqs];
if (con == null)
{
- return (new JNode(null, Dtype.NULL, 0), ii);
+ return new JNode(null, Dtype.NULL, 0);
}
else if (con is double)
{
- return (new JNode((double)con, Dtype.FLOAT, 0), ii);
+ return new JNode((double)con, Dtype.FLOAT, 0);
}
else
{
- return (new JNode((bool)con, Dtype.BOOL, 0), ii);
+ return new JNode((bool)con, Dtype.BOOL, 0);
}
}
else if (Binop.BINOPS.ContainsKey(uqs))
{
- return (Binop.BINOPS[uqs], ii);
+ return Binop.BINOPS[uqs];
}
else if (ArgFunction.FUNCTIONS.ContainsKey(uqs))
{
- return (ArgFunction.FUNCTIONS[uqs], ii);
+ return ArgFunction.FUNCTIONS[uqs];
}
else
{
- return (new JNode(uqs, Dtype.STR, 0), ii);
+ return new JNode(uqs, Dtype.STR, 0);
}
}
- public static (Binop bop, int ii) ParseBinop(string q, int ii)
+ public Binop ParseBinop(string q)
{
char c;
string bs = "";
@@ -251,14 +240,14 @@ public static (Binop bop, int ii) ParseBinop(string q, int ii)
bs = newbs;
ii++;
}
- return (Binop.BINOPS[bs], ii);
+ return Binop.BINOPS[bs];
}
- public static List Tokenize(string q)
+ public List Tokenize(string q)
{
JsonParser jsonParser = new JsonParser();
var tokens = new List();
- int ii = 0;
+ ii = 0;
char c;
JNode quoted_string;
object unquoted_string;
@@ -280,7 +269,8 @@ public static List Tokenize(string q)
else if (c >= '0' && c <= '9')
{
object curtok;
- (curtok, ii) = ParseNumber(q, ii-1);
+ ii--;
+ curtok = ParseNumber(q);
tokens.Add(curtok);
}
else if (DELIMITERS.Contains(c))
@@ -325,12 +315,14 @@ public static List Tokenize(string q)
{
if (q[ii] == '`')
{
- (quoted_string, ii) = ParseQuotedString(q, ii+1);
+ ii++;
+ quoted_string = ParseQuotedString(q);
tokens.Add(new JRegex(new Regex((string)quoted_string.value)));
}
else
{
- (unquoted_string, ii) = ParseUnquotedString(q, ii-1);
+ ii--;
+ unquoted_string = ParseUnquotedString(q);
tokens.Add(unquoted_string);
}
}
@@ -338,28 +330,32 @@ public static List Tokenize(string q)
{
if (q[ii] == '`')
{
- (quoted_string, ii) = ParseQuotedString(q, ii+1);
+ ii++;
+ quoted_string = ParseQuotedString(q);
tokens.Add(jsonParser.Parse((string)quoted_string.value));
}
else
{
- (unquoted_string, ii) = ParseUnquotedString(q, ii-1);
+ ii--;
+ unquoted_string = ParseUnquotedString(q);
tokens.Add(unquoted_string);
}
}
else if (c == '`')
{
- (quoted_string, ii) = ParseQuotedString(q, ii);
+ quoted_string = ParseQuotedString(q);
tokens.Add(quoted_string);
}
else if ((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c == '_'))
{
- (unquoted_string, ii) = ParseUnquotedString(q, ii-1);
+ ii--;
+ unquoted_string = ParseUnquotedString(q);
tokens.Add(unquoted_string);
}
else if (BINOP_START_CHARS.Contains(c))
{
- (bop, ii) = ParseBinop(q, ii-1);
+ ii--;
+ bop = ParseBinop(q);
tokens.Add(bop);
}
else
@@ -388,30 +384,35 @@ class RemesPathLexerTester
public static void Test()
{
JsonParser jsonParser = new JsonParser();
- var testcases = new (string input, List desired, string msg)[]
+ RemesPathLexer lexer = new RemesPathLexer();
+ double inf = 1d;// double.PositiveInfinity;
+ var testcases = new object[][]
{
- ("@ + 2", new List(new object?[]{new CurJson(), Binop.BINOPS["+"], (long)2}), "cur_json binop scalar"),
- ("2.5 7e5 3.2e4 ", new List(new object?[]{2.5, 7e5, 3.2e4}), "all float formats"),
- ("abc_2 `ab\\`c`", new List(new object?[]{"abc_2", "ab`c"}), "unquoted and quoted strings"),
- ("len(null, Infinity)", new List(new object?[]{ArgFunction.FUNCTIONS["len"], '(', null, ',', Double.PositiveInfinity, ')'}), "arg function, constants, delimiters"),
- ("j`[1,\"a\\`\"]`", new List(new object?[]{jsonParser.Parse("[1,\"a`\"]")}), "json string"),
- ("g`a?[b\\`]`", new List(new object?[]{new Regex(@"a?[b`]") }), "regex"),
- (" - /", new List(new object?[]{Binop.BINOPS["-"], Binop.BINOPS["/"]}), "more binops"),
- (". []", new List(new object?[]{'.', '[', ']'}), "more delimiters"),
- ("3blue", new List(new object?[]{(long)3, "blue"}), "number immediately followed by string"),
- ("2.5+-2", new List(new object?[]{2.5, Binop.BINOPS["+"], Binop.BINOPS["-"], (long)2}), "number binop binop number, no whitespace"),
- ("`a`+@", new List(new object?[]{"a", Binop.BINOPS["+"], new CurJson()}), "quoted string binop curjson, no whitespace"),
- ("== in =~", new List(new object?[]{Binop.BINOPS["=="], ArgFunction.FUNCTIONS["in"], Binop.BINOPS["=~"]}), "two-character binops and argfunction in"),
- ("@[1,2]/3", new List(new object?[]{new CurJson(), '[', (long)1, ',', (long)2, ']', Binop.BINOPS["/"], (long)3}), "numbers and delimiters then binop number, no whitespace"),
- ("2 <=3!=", new List(new object?[]{(long)2, Binop.BINOPS["<="], (long)3, Binop.BINOPS["!="]}), "binop where a substring is also a binop")
+ new object[] { "@ + 2", new List(new object[]{new CurJson(), Binop.BINOPS["+"], (long)2}), "cur_json binop scalar" },
+ new object[] { "2.5 7e5 3.2e4 ", new List(new object[]{2.5, 7e5, 3.2e4}), "all float formats" },
+ new object[] { "abc_2 `ab\\`c`", new List(new object[]{"abc_2", "ab`c"}), "unquoted and quoted strings" },
+ new object[] { "len(null, Infinity)", new List(new object[]{ArgFunction.FUNCTIONS["len"], '(', null, ',', inf, ')'}), "arg function, constants, delimiters" },
+ new object[] { "j`[1,\"a\\`\"]`", new List(new object[]{jsonParser.Parse("[1,\"a`\"]")}), "json string" },
+ new object[] { "g`a?[b\\`]`", new List(new object[]{new Regex(@"a?[b`]") }), "regex" },
+ new object[] { " - /", new List(new object[]{Binop.BINOPS["-"], Binop.BINOPS["/"]}), "more binops" },
+ new object[] { ". []", new List(new object[]{'.', '[', ']'}), "more delimiters" },
+ new object[] { "3blue", new List(new object[]{(long)3, "blue"}), "number immediately followed by string" },
+ new object[] { "2.5+-2", new List(new object[]{2.5, Binop.BINOPS["+"], Binop.BINOPS["-"], (long)2}), "number binop binop number, no whitespace" },
+ new object[] { "`a`+@", new List(new object[]{"a", Binop.BINOPS["+"], new CurJson()}), "quoted string binop curjson, no whitespace" },
+ new object[] { "== in =~", new List(new object[]{Binop.BINOPS["=="], ArgFunction.FUNCTIONS["in"], Binop.BINOPS["=~"]}), "two-character binops and argfunction in" },
+ new object[] { "@[1,2]/3", new List(new object[]{new CurJson(), '[', (long)1, ',', (long)2, ']', Binop.BINOPS["/"], (long)3}), "numbers and delimiters then binop number, no whitespace" },
+ new object[] { "2 <=3!=", new List < object >(new object[] {(long) 2, Binop.BINOPS["<="],(long) 3, Binop.BINOPS["!="] }), "!=" }
};
int tests_failed = 0;
int ii = 0;
- foreach ((string input, List desired, string msg) in testcases)
+ foreach (object[] test in testcases)
{
- List output = RemesPathLexer.Tokenize(input);
+ //(string input, List desired, string msg)
+ string input = (string)test[0], msg = (string)test[2];
+ List desired = (List)test[1];
+ List output = lexer.Tokenize(input);
var sb_desired = new StringBuilder();
- foreach (object? desired_value in desired)
+ foreach (object desired_value in desired)
{
if (desired_value is int || desired_value is long || desired_value is double || desired_value is string || desired_value == null)
{
@@ -429,7 +430,7 @@ public static void Test()
}
string str_desired = sb_desired.ToString();
var sb_output = new StringBuilder();
- foreach (object? value in output)
+ foreach (object value in output)
{
if (value is JNode && !(value is CurJson))
{
@@ -457,7 +458,7 @@ public static void Test()
{
try
{
- RemesPathLexer.Tokenize(paren);
+ lexer.Tokenize(paren);
Console.WriteLine($"Test {ii} failed, expected exception due to unmatched '{paren}'");
tests_failed++;
}
@@ -467,7 +468,7 @@ public static void Test()
ii++;
try
{
- RemesPathLexer.Tokenize("1.5.2");
+ lexer.Tokenize("1.5.2");
tests_failed++;
Console.WriteLine($"Test {ii} failed, expected exception due to number with two decimal points");
}
diff --git a/JsonToolsNppPlugin/JSONTools/TestRunner.cs b/JsonToolsNppPlugin/JSONTools/TestRunner.cs
index 215f9f9..e0e4891 100644
--- a/JsonToolsNppPlugin/JSONTools/TestRunner.cs
+++ b/JsonToolsNppPlugin/JSONTools/TestRunner.cs
@@ -6,9 +6,11 @@ There is also a CLI utility that accepts a letter ('j' or 'y') and a filename as
*/
using System;
using System.IO;
+using System.Linq;
using System.Text;
+using JSON_Tools.Utils;
-namespace JSON_Viewer.JSONViewer
+namespace JSON_Tools.JSON_Tools
{
public class TestRunner
{
@@ -74,7 +76,8 @@ Testing RemesPath parser and compiler
Performance tests for JsonParser and RemesPath
=========================
");
- RemesPathBenchmarker.BenchmarkBigFile("@[@[:].z =~ `(?i)[a-z]{5}`]", 14);
+ string big_random_fname = "../../testfiles/big_random.json";
+ RemesPathBenchmarker.Benchmark("@[@[:].z =~ `(?i)[a-z]{5}`]", big_random_fname, 14);
// because Visual Studio runs a whole bunch of other things in the background
// when I build my project, the benchmarking suite
// makes my code seem way slower than it actually is when it's running unhindered.
@@ -95,18 +98,18 @@ Testing JSON parser's linter functionality
=========================
");
JsonParserTester.TestLinter();
- Console.WriteLine(@"=========================
-Testing JSON grepper's file reading ability
-=========================
-");
- JsonGrepperTester.TestFnames();
+// Console.WriteLine(@"=========================
+//Testing JSON grepper's file reading ability
+//=========================
+//");
+// JsonGrepperTester.TestFnames();
}
else
{
JsonParser jsonParser = new JsonParser();
string out_type = args[0].ToLower();
// Slice extension method from JsonPath module
- string fname = String.Join(' ', args.Slice("1:"));
+ string fname = string.Join(" ", args.Slice("1:").ToArray());
StreamReader streamReader = new StreamReader(fname);
string jsonstr = streamReader.ReadToEnd();
JNode json = jsonParser.Parse(jsonstr);
diff --git a/JsonToolsNppPlugin/JSONTools/YamlDumper.cs b/JsonToolsNppPlugin/JSONTools/YamlDumper.cs
index e1591ba..abfb3ce 100644
--- a/JsonToolsNppPlugin/JSONTools/YamlDumper.cs
+++ b/JsonToolsNppPlugin/JSONTools/YamlDumper.cs
@@ -3,10 +3,11 @@ Reads a JSON document and outputs YAML that can be serialized back to
equivalent (or very nearly equivalent) JSON.
*/
using System;
+using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
-namespace JSON_Viewer.JSONViewer
+namespace JSON_Tools.JSON_Tools
{
public class YamlDumper
{
@@ -126,11 +127,11 @@ private string YamlValRepr(JNode v)
if (double.TryParse(strv, out double d))
{
// k is a number
- switch (d)
- {
- case double.PositiveInfinity: return ".inf";
- case double.NegativeInfinity: return "-.inf";
- }
+ //switch (d)
+ //{
+ // case double.PositiveInfinity: return ".inf";
+ // case double.NegativeInfinity: return "-.inf";
+ //}
if (double.IsNaN(d))
{
return ".nan";
@@ -233,17 +234,17 @@ public string Dump(JNode json)
public class YamlDumperTester
{
- public static int MyUnitTest((string, string, string)[] testcases)
+ public static int MyUnitTest(string[][] testcases)
{
JsonParser jsonParser = new JsonParser();
int tests_failed = 0;
YamlDumper yamlDumper = new YamlDumper();
for (int ii = 0; ii < testcases.Length; ii++)
{
- var input = testcases[ii].Item1;
+ var input = testcases[ii][0];
JNode json = jsonParser.Parse(input);
- var correct = testcases[ii].Item2;
- var description = testcases[ii].Item3;
+ var correct = testcases[ii][1];
+ var description = testcases[ii][2];
var result = yamlDumper.Dump(json, 2);
if (correct != result)
{
@@ -263,55 +264,42 @@ public static int MyUnitTest((string, string, string)[] testcases)
public static void Test()
{
- (string, string, string)[] tests = {
+ string[][] tests = {
// space at end of key
- ("{\"adogDOG! \": \"dog\"}", "\"adogDOG! \": dog\n",
- "space at end of key"),
- ("{\" adogDOG!\": \"dog\"}", "\" adogDOG!\": dog\n",
- "space at start of key"),
+ new string[]{ "{\"adogDOG! \": \"dog\"}", "\"adogDOG! \": dog\n",
+ "space at end of key" },
+ new string[]{ "{\" adogDOG!\": \"dog\"}", "\" adogDOG!\": dog\n",
+ "space at start of key" },
// space inside key
- ("{\"a dog DOG!\": \"dog\"}", "\"a dog DOG!\": dog\n",
- "space inside key"),
+ new string[]{ "{\"a dog DOG!\": \"dog\"}", "\"a dog DOG!\": dog\n",
+ "space inside key" },
// stringified nums as keys
- ("{\"9\": 9}", "'9': 9\n", "stringified num as key"),
+ new string[]{ "{\"9\": 9}", "'9': 9\n", "stringified num as key" },
//
- ("{\"9\": \"9\"}", "'9': '9'\n", "stringified num as val"),
- ("{\"9a\": \"9a\", \"a9.2\": \"a9.2\"}", "9a: 9a\na9.2: a9.2\n",
- "partially stringified nums as vals"),
- ("{\"a\\\"b'\": \"bub\\\"ar\"}", "a\"b': \"bub\\\"ar\"\n",
- "singlequotes and doublequotes inside key"),
- ("{\"a\": \"big\\nbad\\ndog\"}", "a: \"big\\nbad\\ndog\"\n",
- "values containing newlines"),
- ("{\"a\": \" big \"}", "a: \" big \"\n",
- "leading or ending space in dict value"),
- ("[\" big \"]", "- \" big \"\n",
- "leading or ending space in array value"),
- ("\"a \"", "\"a \"\n", "scalar string"),
- ("9", "9\n", "scalar int"),
- ("-940.3", "-940.3\n", "scalar float"),
- ("[true, false]", "- True\n- False\n", "scalar bools"),
- ("[null, Infinity, -Infinity, NaN]",
- "- null\n- .inf\n- -.inf\n- .nan\n",
- "null, +/-infinity, NaN"),
+ new string[] { "{\"9\": \"9\"}", "'9': '9'\n", "stringified num as val" },
+ new string[] { "{\"9a\": \"9a\", \"a9.2\": \"a9.2\"}", "9a: 9a\na9.2: a9.2\n", "partially stringified nums as vals" },
+ new string[] { "{\"a\\\"b'\": \"bub\\\"ar\"}", "a\"b': \"bub\\\"ar\"\n", "singlequotes and doublequotes inside key" },
+ new string[] { "{\"a\": \"big\\nbad\\ndog\"}", "a: \"big\\nbad\\ndog\"\n", "values containing newlines" },
+ new string[] { "{\"a\": \" big \"}", "a: \" big \"\n", "leading or ending space in dict value" },
+ new string[] { "[\" big \"]", "- \" big \"\n", "leading or ending space in array value" },
+ new string[] { "\"a \"", "\"a \"\n", "scalar string" },
+ new string[] { "9", "9\n", "scalar int" },
+ new string[] { "-940.3", "-940.3\n", "scalar float" },
+ new string[] { "[true, false]", "- True\n- False\n", "scalar bools" },
+ new string[] { "[null, Infinity, -Infinity, NaN]", "- null\n- .inf\n- -.inf\n- .nan\n", "null, +/-infinity, NaN" },
// in the below case, there's actually a bit of an error;
// it is better to dump the float 2.0 as '2.0', but this algorithm dumps it
// as an integer.
// So there's some room for improvement here
- ("{\"a\": [[1, 2.0], { \"3\": [\"5\"]}], \"2\": 6}",
+ new string[]{ "{\"a\": [[1, 2.0], { \"3\": [\"5\"]}], \"2\": 6}",
"a:\n -\n - 1\n - 2.0\n -\n '3':\n - '5'\n'2': 6\n",
- "nested iterables"),
- ("{\"a\": \"a: b\"}", "a: \"a: b\"\n", "value contains colon"),
- ("{\"a: b\": \"a\"}", "\"a: b\": a\n", "key contains colon"),
- ("{\"a\": \"RT @blah: MondayMo\\\"r\'ing\"}",
- "a: \'RT @blah: MondayMo\"r\'\'ing\'\n",
- "Value contains quotes and colon"),
- ("{\"a\": \"a\\n\'big\'\\ndog\"}", "a: \"a\\n\'big\'\\ndog\"\n",
- "Value contains quotes and newline"),
- ("{\"a\": \"RT @blah: MondayMo\\nring\"}",
- "a: \"RT @blah: MondayMo\\nring\"\n",
- "value contains newline and colon"),
- ("{\"\\\"a: 'b'\": \"a\"}", "\'\"a: \'\'b\'\'\': a\n",
- "key contains quotes and colon")
+ "nested iterables" },
+ new string[] { "{\"a\": \"a: b\"}", "a: \"a: b\"\n", "value contains colon" },
+ new string[] { "{\"a: b\": \"a\"}", "\"a: b\": a\n", "key contains colon" },
+ new string[] { "{\"a\": \"RT @blah: MondayMo\\\"r\'ing\"}", "a: \'RT @blah: MondayMo\"r\'\'ing\'\n", "Value contains quotes and colon" },
+ new string[] { "{\"a\": \"a\\n\'big\'\\ndog\"}", "a: \"a\\n\'big\'\\ndog\"\n", "Value contains quotes and newline" },
+ new string[] { "{\"a\": \"RT @blah: MondayMo\\nring\"}", "a: \"RT @blah: MondayMo\\nring\"\n", "value contains newline and colon" },
+ new string[] { "{\"\\\"a: 'b'\": \"a\"}", "\'\"a: \'\'b\'\'\': a\n", "key contains quotes and colon" }
};
MyUnitTest(tests);
}
diff --git a/JsonToolsNppPlugin/JsonToolsNppPlugin.csproj b/JsonToolsNppPlugin/JsonToolsNppPlugin.csproj
index d15e8e6..b989ff9 100644
--- a/JsonToolsNppPlugin/JsonToolsNppPlugin.csproj
+++ b/JsonToolsNppPlugin/JsonToolsNppPlugin.csproj
@@ -66,7 +66,24 @@
$(MSBuildProgramFiles32)\Notepad++\notepad++.exe
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
Form
-
- frmGoToLine.cs
+
+
Resources.resx
@@ -130,13 +168,45 @@
True
Resources.resx
+
+ frmGoToLine.cs
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
frmGoToLine.cs
Designer
@@ -146,12 +216,56 @@
ResXFileCodeGenerator
Resources1.Designer.cs
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/JsonToolsNppPlugin/Main.cs b/JsonToolsNppPlugin/Main.cs
index 11afb43..afcc62a 100644
--- a/JsonToolsNppPlugin/Main.cs
+++ b/JsonToolsNppPlugin/Main.cs
@@ -10,6 +10,10 @@
using System.Runtime.InteropServices;
using System.Text.RegularExpressions;
using Kbg.NppPluginNET.PluginInfrastructure;
+using NppPluginNET.PluginInfrastructure;
+using JSON_Tools.JSON_Tools;
+using JSON_Tools.Utils;
+//using JSON_Viewer.Forms;
using static Kbg.NppPluginNET.PluginInfrastructure.Win32;
namespace Kbg.NppPluginNET
@@ -126,7 +130,7 @@ static internal void CommandMenuInit()
PluginBase.SetCommand(10, "---", null);
- PluginBase.SetCommand(11, "Get File Names Demo", getFileNamesDemo);
+ PluginBase.SetCommand(11, "Get Json file names", getFileNamesDemo);
PluginBase.SetCommand(12, "Get Session File Names Demo", getSessionFileNamesDemo);
PluginBase.SetCommand(13, "Save Current Session Demo", saveCurrentSessionDemo);
@@ -251,30 +255,35 @@ static void callbackWhatIsNpp(object data)
{
string text2display = (string)data;
notepad.FileNew();
+ string new_file_name = getCurrentPath(NppMsg.FULL_CURRENT_PATH);
Random srand = new Random(DateTime.Now.Millisecond);
int rangeMin = 0;
- int rangeMax = 250;
+ int rangeMax = 125;
for (int i = 0; i < text2display.Length; i++)
{
Thread.Sleep(srand.Next(rangeMin, rangeMax) + 30);
+ // stop adding new text if the user closes or switches out of the new file.
+ // otherwise you get this obnoxious addition of text to existing files.
+ string selected_file_name = getCurrentPath(NppMsg.FULL_CURRENT_PATH);
+ if (selected_file_name != new_file_name) break;
editor.AppendTextAndMoveCursor(text2display[i].ToString());
}
}
static void insertCurrentFullPath()
{
- insertCurrentPath(NppMsg.FULL_CURRENT_PATH);
+ editor.ReplaceSel(getCurrentPath(NppMsg.FULL_CURRENT_PATH));
}
static void insertCurrentFileName()
{
- insertCurrentPath(NppMsg.FILE_NAME);
+ editor.ReplaceSel(getCurrentPath(NppMsg.FILE_NAME));
}
static void insertCurrentDirectory()
{
- insertCurrentPath(NppMsg.CURRENT_DIRECTORY);
+ editor.ReplaceSel(getCurrentPath(NppMsg.CURRENT_DIRECTORY));
}
- static void insertCurrentPath(NppMsg which)
+ static string getCurrentPath(NppMsg which)
{
NppMsg msg = NppMsg.NPPM_GETFULLCURRENTPATH;
if (which == NppMsg.FILE_NAME)
@@ -285,7 +294,7 @@ static void insertCurrentPath(NppMsg which)
StringBuilder path = new StringBuilder(Win32.MAX_PATH);
Win32.SendMessage(PluginBase.nppData._nppHandle, (uint) msg, 0, path);
- editor.ReplaceSel(path.ToString());
+ return path.ToString();
}
static void insertShortDateTime()
@@ -307,7 +316,7 @@ static void checkInsertHtmlCloseTag()
PluginBase.CheckMenuItemToggle(9, ref doCloseTag); // 9 = menu item index
}
- static Regex regex = new Regex(@"[\._\-:\w]", RegexOptions.Compiled);
+ static Regex XmlTagRegex = new Regex(@"[\._\-:\w]", RegexOptions.Compiled);
static internal void doInsertHtmlCloseTag(char newChar)
{
@@ -349,7 +358,7 @@ static internal void doInsertHtmlCloseTag(char newChar)
var insertString = new StringBuilder("");
- while (regex.IsMatch(buf[pCur].ToString()))
+ while (XmlTagRegex.IsMatch(buf[pCur].ToString()))
{
insertString.Append(buf[pCur]);
pCur++;
diff --git a/JsonToolsNppPlugin/Properties/AssemblyInfo.cs b/JsonToolsNppPlugin/Properties/AssemblyInfo.cs
index b4962e8..27c18e2 100644
--- a/JsonToolsNppPlugin/Properties/AssemblyInfo.cs
+++ b/JsonToolsNppPlugin/Properties/AssemblyInfo.cs
@@ -5,12 +5,12 @@
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
-[assembly: AssemblyTitle("Managed demo plugin for Notepad++")]
+[assembly: AssemblyTitle("JSON tools plugin for Notepad++")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
-[assembly: AssemblyCompany("Kbg")]
-[assembly: AssemblyProduct("Managed demo plugin for Notepad++")]
-[assembly: AssemblyCopyright("Kbg 2016")]
+[assembly: AssemblyCompany("molsonkiko")]
+[assembly: AssemblyProduct("JSON tools plugin for Notepad++")]
+[assembly: AssemblyCopyright("molsonkiko 2022")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
@@ -29,5 +29,5 @@
// Build Number
// Revision
//
-[assembly: AssemblyVersion("0.7.0.0")]
-[assembly: AssemblyFileVersion("0.7.0.0")]
+[assembly: AssemblyVersion("1.0.0.0")]
+[assembly: AssemblyFileVersion("1.0.0.0")]
diff --git a/JsonToolsNppPlugin/Tools/Slice.cs b/JsonToolsNppPlugin/Tools/Slice.cs
deleted file mode 100644
index b8a2359..0000000
--- a/JsonToolsNppPlugin/Tools/Slice.cs
+++ /dev/null
@@ -1,374 +0,0 @@
-using System;
-using System.Collections.Generic;
-
-namespace JSON_Viewer.JSONViewer
-{
- public static class SliceExtensions
- {
- // TODO: Replace these slices with code copy-pasted from
- // https://github.com/henon/SliceAndDice/blob/master/src/SliceAndDice/Shape.cs
- // Those slices are better because (a) they are more versatile and (b) THEY ARE VIEWS RATHER THAN MEM-COPIERS
-
- ///
- /// Allows the use of Python-style slices, where start, stop, and stride must be declared as individual paramters.
- /// Thus e.g. arr.Slice(2, null, -1) is just like arr[slice(2, None, -1)] in Python.
- /// This just yields the elements one by one.
- /// If you want a function that yields a shallow copy of the sliced region of the iterable, use Slice<T> instead.
- ///
- ///
- ///
- ///
- ///
- ///
- ///
- public static IEnumerable LazySlice(this IList source, int? start, int? stop = null, int? stride = null)
- {
- int len = (source is T[]) ? ((T[])source).Length : ((List)source).Count;
- int ind;
- if (stride == 0)
- {
- throw new ArgumentException("The stride parameter of a slice must be a non-zero integer");
- }
- if (len <= 0)
- {
- yield break;
- }
- int istart, istop, istride;
- if (start != null && stop == null && stride == null)
- {
- int temp = (int)start;
- stop = temp;
- start = 0;
- }
- else if (stride == null || stride > 0)
- {
- if (start == null) { start = 0; }
- if (stop == null) { stop = len; }
- }
- else
- {
- if (start == null)
- {
- start = len - 1;
- }
- if (stop == null)
- {
- istride = (int)stride;
- istart = (int)start;
- // we want "start::-x" or "::-x" to go from start
- // to first element by x
- istop = -1;
- if (istart < 0)
- {
- istart += len;
- }
- len = (istop - istart) / istride + 1;
- if (len % istride == 0)
- {
- // this is tricky! If len is divisible by stride,
- // we would overshoot into index -1 and get
- // an index error.
- len -= 1;
- }
- for (int ii = 0; ii < len; ii++)
- {
- ind = istart + ii * istride;
- yield return source[ind];
- }
- }
- }
- istop = (stop < 0) ? len + (int)stop : (stop > len ? len : (int)stop);
- istart = (start < 0) ? len + (int)start : (int)start;
- istride = (stride == null) ? 1 : (int)stride;
- if (istart >= len && istop >= len)
- {
- yield break;
- }
- if ((istop - istart) % Math.Abs(istride) == 0)
- {
- len = (istop - istart) / istride;
- }
- else
- {
- // if the final stride would carry you out of the array, the output array is going to be
- // one larger than (stop - start)/stride, because the first element is always in the output array.
- len = (istop - istart) / istride + 1;
- }
- if (len <= 0)
- {
- yield break;
- }
- for (int ii = 0; ii < len; ii++)
- {
- ind = istart + ii * istride;
- yield return source[ind];
- }
- }
-
- ///
- /// Allows the use of Python-style slices, passed as strings (e.g., ":", "1::-2").
- /// Because LazySlice is an extension method, all arrays in this namespace can use this method.
- /// See https://stackoverflow.com/questions/509211/understanding-slicing
- /// This just yields the elements one by one.
- /// If you want a function that yields a shallow copy of the sliced region of the iterable, use Slice<T> instead.
- ///
- public static IEnumerable LazySlice(this IList source, string slicer)
- // note the "this" before the type and type.
- // that designates Slice as a new method that can be used by T[] objects
- // (i.e., arrays of any object type) using the "." syntax.
- // That's why later on we see a.Slice(":" + a.Length) even though a doesn't
- // come with the Slice method built in.
- {
- string[] parts = slicer.Split(':');
- int? start = 0, stop = 0, stride = 1;
- switch (parts.Length)
- {
- case 1: start = 0; stop = int.Parse(parts[0]); break;
- case 2:
- if (parts[0] == "") start = null; else start = int.Parse(parts[0]);
- if (parts[1] == "") stop = null; else stop = int.Parse(parts[1]);
- break;
- case 3:
- if (parts[0] == "") start = null; else start = int.Parse(parts[0]);
- if (parts[1] == "") stop = null; else stop = int.Parse(parts[1]);
- if (parts[2] == "") stride = null; else stride = int.Parse(parts[2]);
- break;
- }
- foreach (T item in source.LazySlice(start, stop, stride)) { yield return item; }
- }
-
- ///
- /// This just yields the elements one by one.
- /// If you want a function that yields a shallow copy of the sliced region of the iterable, use Slice<T> instead.
- ///
- ///
- ///
- ///
- ///
- public static IEnumerable LazySlice(this IList source, int?[] slicer)
- {
- int? start = 0, stop = 0, stride = 1;
- switch (slicer.Length)
- {
- case 1: start = 0; stop = slicer[0]; break;
- case 2:
- start = slicer[0];
- stop = slicer[1];
- break;
- case 3:
- start = slicer[0];
- stop = slicer[1];
- stride = slicer[2];
- break;
- }
- foreach (T item in source.LazySlice(start, stop, stride)) { yield return item; }
- }
-
- ///
- /// Allows use of Python-style slices, except that these create a copy of the sliced object rather than a view.
- /// For higher performance at the cost of only producing an iterator and not a new iterable, use LazySlice.
- /// See the documentation for LazySlice with three int? arguments.
- ///
- ///
- ///
- ///
- ///
- ///
- ///
- ///
- public static IList Slice(this IList source, int? start, int? stop = null, int? stride = null)
- {
- bool isarr = source is T[];
- bool islist = source is List;
- int len = isarr ? ((T[])source).Length : (islist ? ((List)source).Count : throw new NotImplementedException("Slice is only implemented for arrays and Lists. For slicing of other indexable iterables, use LazySlice."));
- IList res;
- int source_len = len;
- int ind;
- if (stride == 0)
- {
- throw new ArgumentException("The stride parameter of a slice must be a non-zero integer");
- }
- if (len <= 0)
- {
- if (isarr) return new T[0]; else return new List();
- }
- int istart, istop, istride;
- if (start != null && stop == null && stride == null)
- {
- int temp = (int)start;
- stop = temp;
- start = 0;
- }
- else if (stride == null || stride > 0)
- {
- if (start == null) { start = 0; }
- if (stop == null) { stop = len; }
- }
- else
- {
- if (start == null)
- {
- start = len - 1;
- }
- if (stop == null)
- {
- istride = (int)stride;
- istart = (int)start;
- // we want "start::-x" or "::-x" to go from start
- // to first element by x
- istop = -1;
- if (istart < 0)
- {
- istart += len;
- }
- else if (istart >= len)
- {
- istart = len - 1;
- }
- len = (istop - istart) / istride + 1;
- if (len % istride == 0)
- {
- // this is tricky! If len is divisible by stride,
- // we would overshoot into index -1 and get
- // an index error.
- len -= 1;
- }
- if (isarr)
- {
- res = new T[len];
- for (int ii = 0; ii < len; ii++)
- {
- ind = istart + ii * istride;
- res[ii] = source[ind];
- }
- return res;
- }
- res = new List();
- for (int ii = 0; ii < len; ii++)
- {
- ind = istart + ii * istride;
- res.Add(source[ind]);
- }
- return res;
- }
- }
- // make sure the start isn't higher than
- istop = (stop < 0) ? len + (int)stop : (stop > len ? len : (int)stop);
- istart = (start < 0) ? len + (int)start : (int)start;
- istride = (stride == null) ? 1 : (int)stride;
- if (istart >= len && istop >= len)
- {
- if (isarr) return new T[0]; else return new List();
- }
- if ((istop - istart) % Math.Abs(istride) == 0)
- {
- len = (istop - istart) / istride;
- }
- else
- {
- // if the final stride would carry you out of the array, the output array is going to be
- // one larger than (stop - start)/stride, because the first element is always in the output array.
- len = (istop - istart) / istride + 1;
- }
- if (len <= 0)
- {
- if (isarr) return new T[0]; else return new List();
- }
- if (isarr)
- {
- res = new T[len];
- for (int ii = 0; ii < len; ii++)
- {
- ind = istart + ii * istride;
- res[ii] = source[ind];
- }
- return res;
- }
- res = new List();
- for (int ii = 0; ii < len; ii++)
- {
- ind = istart + ii * istride;
- res.Add(source[ind]);
- }
- return res;
- }
-
- ///
- /// Allows use of Python-style slices, except that these create a copy of the sliced object rather than a view.
- /// For higher performance at the cost of only producing an iterator and not a new iterable, use LazySlice.
- /// See the documentation for LazySlice with a string argument
- ///
- ///
- ///
- ///
- ///
- ///
- ///
- ///
- public static IList Slice(this IList source, string slicer)
- {
- string[] parts = slicer.Split(':');
- int? start = 0, stop = 0, stride = 1;
- switch (parts.Length)
- {
- case 1: start = 0; stop = int.Parse(parts[0]); break;
- case 2:
- if (parts[0] == "") start = null; else start = int.Parse(parts[0]);
- if (parts[1] == "") stop = null; else stop = int.Parse(parts[1]);
- break;
- case 3:
- if (parts[0] == "") start = null; else start = int.Parse(parts[0]);
- if (parts[1] == "") stop = null; else stop = int.Parse(parts[1]);
- if (parts[2] == "") stride = null; else stride = int.Parse(parts[2]);
- break;
- }
- return source.Slice(start, stop, stride);
- }
-
- ///
- /// Allows use of Python-style slices, except that these create a copy of the sliced object rather than a view.
- /// For higher performance at the cost of only producing an iterator and not a new iterable, use LazySlice.
- /// See the documentation for LazySlice with three int? arguments.
- ///
- ///
- ///
- ///
- ///
- ///
- ///
- ///
- public static IList Slice(this IList source, int?[] slicer)
- {
- int? start = 0, stop = 0, stride = 1;
- switch (slicer.Length)
- {
- case 1: start = 0; stop = slicer[0]; break;
- case 2:
- start = slicer[0];
- stop = slicer[1];
- break;
- case 3:
- start = slicer[0];
- stop = slicer[1];
- stride = slicer[2];
- break;
- }
- return source.Slice(start, stop, stride);
- }
-
- public static string Slice(this string source, string slicer)
- {
- return new string((char[])source.ToCharArray().Slice(slicer));
- }
-
- public static string Slice(this string source, int? start, int? stop, int? stride)
- {
- return new string((char[])source.ToCharArray().Slice(start, stop, stride));
- }
-
- public static string Slice(this string source, int?[] slicer)
- {
- return new string((char[])source.ToCharArray().Slice(slicer));
- }
- }
-}
diff --git a/JsonToolsNppPlugin/Tools/Settings.cs b/JsonToolsNppPlugin/Utils/Settings.cs
similarity index 99%
rename from JsonToolsNppPlugin/Tools/Settings.cs
rename to JsonToolsNppPlugin/Utils/Settings.cs
index a879048..1ab9cec 100644
--- a/JsonToolsNppPlugin/Tools/Settings.cs
+++ b/JsonToolsNppPlugin/Utils/Settings.cs
@@ -5,7 +5,7 @@
using System.Linq;
using System.Windows.Forms;
-namespace JSON_Viewer.Infrastructure
+namespace JSON_Tools.Utils
{
///
/// Manages application settings
diff --git a/JsonToolsNppPlugin/JSONTools/Slice.cs b/JsonToolsNppPlugin/Utils/Slice.cs
similarity index 94%
rename from JsonToolsNppPlugin/JSONTools/Slice.cs
rename to JsonToolsNppPlugin/Utils/Slice.cs
index b8a2359..93adacf 100644
--- a/JsonToolsNppPlugin/JSONTools/Slice.cs
+++ b/JsonToolsNppPlugin/Utils/Slice.cs
@@ -1,7 +1,7 @@
using System;
using System.Collections.Generic;
-namespace JSON_Viewer.JSONViewer
+namespace JSON_Tools.Utils
{
public static class SliceExtensions
{
@@ -25,7 +25,7 @@ public static IEnumerable LazySlice(this IList source, int? start, int?
{
int len = (source is T[]) ? ((T[])source).Length : ((List)source).Count;
int ind;
- if (stride == 0)
+ if ((int)stride == 0)
{
throw new ArgumentException("The stride parameter of a slice must be a non-zero integer");
}
@@ -40,7 +40,7 @@ public static IEnumerable LazySlice(this IList source, int? start, int?
stop = temp;
start = 0;
}
- else if (stride == null || stride > 0)
+ else if (stride == null || (int)stride > 0)
{
if (start == null) { start = 0; }
if (stop == null) { stop = len; }
@@ -77,8 +77,8 @@ public static IEnumerable LazySlice(this IList source, int? start, int?
}
}
}
- istop = (stop < 0) ? len + (int)stop : (stop > len ? len : (int)stop);
- istart = (start < 0) ? len + (int)start : (int)start;
+ istop = ((int)stop < 0) ? len + (int)stop : ((int)stop > len ? len : (int)stop);
+ istart = ((int)start < 0) ? len + (int)start : (int)start;
istride = (stride == null) ? 1 : (int)stride;
if (istart >= len && istop >= len)
{
@@ -167,7 +167,7 @@ public static IEnumerable LazySlice(this IList source, int?[] slicer)
///
/// Allows use of Python-style slices, except that these create a copy of the sliced object rather than a view.
/// For higher performance at the cost of only producing an iterator and not a new iterable, use LazySlice.
- /// See the documentation for LazySlice with three int? arguments.
+ /// See the documentation for LazySlice with three int arguments.
///
///
///
@@ -184,7 +184,7 @@ public static IList Slice(this IList source, int? start, int? stop = nu
IList res;
int source_len = len;
int ind;
- if (stride == 0)
+ if (stride != null && (int)stride == 0)
{
throw new ArgumentException("The stride parameter of a slice must be a non-zero integer");
}
@@ -199,7 +199,7 @@ public static IList Slice(this IList source, int? start, int? stop = nu
stop = temp;
start = 0;
}
- else if (stride == null || stride > 0)
+ else if (stride == null || (int)stride > 0)
{
if (start == null) { start = 0; }
if (stop == null) { stop = len; }
@@ -253,8 +253,8 @@ public static IList Slice(this IList source, int? start, int? stop = nu
}
}
// make sure the start isn't higher than
- istop = (stop < 0) ? len + (int)stop : (stop > len ? len : (int)stop);
- istart = (start < 0) ? len + (int)start : (int)start;
+ istop = ((int)stop < 0) ? len + (int)stop : ((int)stop > len ? len : (int)stop);
+ istart = ((int)start < 0) ? len + (int)start : (int)start;
istride = (stride == null) ? 1 : (int)stride;
if (istart >= len && istop >= len)
{
@@ -328,7 +328,7 @@ public static IList Slice(this IList source, string slicer)
///
/// Allows use of Python-style slices, except that these create a copy of the sliced object rather than a view.
/// For higher performance at the cost of only producing an iterator and not a new iterable, use LazySlice.
- /// See the documentation for LazySlice with three int? arguments.
+ /// See the documentation for LazySlice with three int arguments.
///
///
///
@@ -361,7 +361,7 @@ public static string Slice(this string source, string slicer)
return new string((char[])source.ToCharArray().Slice(slicer));
}
- public static string Slice(this string source, int? start, int? stop, int? stride)
+ public static string Slice(this string source, int start, int stop, int stride)
{
return new string((char[])source.ToCharArray().Slice(start, stop, stride));
}
diff --git a/JsonToolsNppPlugin/dissassembly.txt b/JsonToolsNppPlugin/dissassembly.txt
new file mode 100644
index 0000000..2935f0f
--- /dev/null
+++ b/JsonToolsNppPlugin/dissassembly.txt
@@ -0,0 +1,1830 @@
+L_80000000: pop r10
+L_80000002: nop
+L_80000003: add [rbx], al
+L_80000005: add [rax], al
+L_80000007: add [rax+rax], al
+L_8000000A: add [rax], al
+L_8000000C: db 0xff
+L_8000000D: inc dword [rax]
+L_8000000F: add [rax+0x0], bh
+L_80000015: add [rax], al
+L_80000017: add [rax+0x0], al
+L_8000001A: add [rax], al
+L_8000001C: add [rax], al
+L_8000001E: add [rax], al
+L_80000020: add [rax], al
+L_80000022: add [rax], al
+L_80000024: add [rax], al
+L_80000026: add [rax], al
+L_80000028: add [rax], al
+L_8000002A: add [rax], al
+L_8000002C: add [rax], al
+L_8000002E: add [rax], al
+L_80000030: add [rax], al
+L_80000032: add [rax], al
+L_80000034: add [rax], al
+L_80000036: add [rax], al
+L_80000038: add [rax], al
+L_8000003A: add [rax], al
+L_8000003C: add byte [rax], 0x0
+L_8000003F: add [rsi], cl
+L_80000041: db 0x1f
+L_80000042: mov edx, 0x9b4000e
+L_80000047: int 0x21
+L_80000049: mov eax, 0x21cd4c01
+L_8000004E: push rsp
+L_8000004F: push 0x70207369
+L_80000054: jb 0x800000c5
+L_80000056: db 0x67
+L_80000057: jb 0x800000ba
+L_80000059: insd
+L_8000005A: and [rbx+0x61], ah
+L_8000005D: outsb
+L_8000005E: outsb
+L_8000005F: outsd
+L_80000060: jz 0x80000082
+L_80000062: db 0x62
+L_80000063: and gs:[rdx+0x75], dh
+L_80000067: outsb
+L_80000068: and [rcx+0x6e], ch
+L_8000006B: and [rdi+rcx*2+0x53], al
+L_8000006F: and [rbp+0x6f], ch
+L_80000072: db 0x64
+L_80000073: db 0x65
+L_80000074: db 0x2e
+L_80000075: or eax, 0x240a0d
+L_8000007A: add [rax], al
+L_8000007C: add [rax], al
+L_8000007E: add [rax], al
+L_80000080: push rax
+L_80000081: add [r8], r8b
+L_80000084: xchg fs:[rdx], al
+L_80000087: add [rcx+0x7f], ah
+L_8000008A: db 0x6
+L_8000008B: movzxd rax, [rax]
+L_8000008D: add [rax], al
+L_8000008F: add [rax], al
+L_80000091: add [rax], al
+L_80000093: add al, dh
+L_80000095: add [rdx], ah
+L_80000097: and [rbx], cl
+L_80000099: add dh, [rax]
+L_8000009B: add [rax], al
+L_8000009D: push 0x4000006
+L_800000A2: add [rax], al
+L_800000A4: add [rax], al
+L_800000A6: add [rax], al
+L_800000A8: add [rax], al
+L_800000AA: add [rax], al
+L_800000AC: add [rax], ah
+L_800000AE: add [rax], al
+L_800000B0: add [rax], al
+L_800000B2: add [rax+0x1], al
+L_800000B8: add [rax], ah
+L_800000BA: add [rax], al
+L_800000BC: add [rdx], al
+L_800000BE: add [rax], al
+L_800000C0: add al, 0x0
+L_800000C2: add [rax], al
+L_800000C4: add [rax], al
+L_800000C6: add [rax], al
+L_800000C8: add al, 0x0
+L_800000CA: add [rax], al
+L_800000CC: add [rax], al
+L_800000CE: add [rax], al
+L_800000D0: add al, al
+L_800000D2: db 0x6
+L_800000D3: add [rax], al
+L_800000D5: add al, [rax]
+L_800000D7: add [rax], al
+L_800000D9: add [rax], al
+L_800000DB: add [rbx], al
+L_800000DD: add [rax-0x7b], al
+L_800000E0: add [rax], al
+L_800000E2: add [rax], al
+L_800000E5: add [rax], al
+L_800000E7: add [rax], al
+L_800000E9: add [rax], al
+L_800000EC: add [rax], al
+L_800000EE: add [rax], al
+L_800000F0: add [rax], al
+L_800000F2: adc [rax], al
+L_800000F4: add [rax], al
+L_800000F6: add [rax], al
+L_800000F8: add [rax], ah
+L_800000FA: add [rax], al
+L_800000FC: add [rax], al
+L_800000FE: add [rax], al
+L_80000100: add [rax], al
+L_80000102: add [rax], al
+L_80000104: adc [rax], al
+L_80000106: add [rax], al
+L_80000108: add [rax], al
+L_8000010A: add [rax], al
+L_8000010C: add [rax], al
+L_8000010E: add [rax], al
+L_80000110: add [rax], al
+L_80000112: add [rax], al
+L_80000114: add [rax], al
+L_80000116: add [rax], al
+L_80000118: add [rax+0x3cc0006], ah
+L_8000011E: add [rax], al
+L_80000120: add [rax], al
+L_80000122: add [rax], al
+L_80000124: add [rax], al
+L_80000126: add [rax], al
+L_80000128: add [rax], al
+L_8000012A: add [rax], al
+L_8000012C: add [rax], al
+L_8000012E: add [rax], al
+L_80000130: add [rax], al
+L_80000132: add [rax], al
+L_80000134: add [rax], al
+L_80000136: add [rax], al
+L_80000138: xor [rsi+0x1c0006], al
+L_8000013E: add [rax], al
+L_80000140: add [rax], al
+L_80000142: add [rax], al
+L_80000144: add [rax], al
+L_80000146: add [rax], al
+L_80000148: add [rax], al
+L_8000014A: add [rax], al
+L_8000014C: add [rax], al
+L_8000014E: add [rax], al
+L_80000150: add [rax], al
+L_80000152: add [rax], al
+L_80000154: add [rax], al
+L_80000156: add [rax], al
+L_80000158: add [rax], al
+L_8000015A: add [rax], al
+L_8000015C: add [rax], al
+L_8000015E: add [rax], al
+L_80000160: add [rax], al
+L_80000162: add [rax], al
+L_80000164: add [rax], al
+L_80000166: add [rax], al
+L_80000168: add [rax], al
+L_8000016A: add [rax], al
+L_8000016C: add [rax], al
+L_8000016E: add [rax], al
+L_80000170: add [rax], al
+L_80000172: add [rax], al
+L_80000174: add [rax], al
+L_80000176: add [rax], al
+L_80000178: add [rax], ah
+L_8000017A: add [rax], al
+L_8000017C: add [rax], al
+L_8000017F: add [rax], al
+L_80000181: add [rax], al
+L_80000183: add [rax], al
+L_80000185: add [rax], al
+L_80000187: add [rsi], ch
+L_80000189: jz 0x800001f0
+L_8000018B: js 0x80000201
+L_8000018D: add [rax], al
+L_8000018F: add [rax+0x667], dh
+L_80000195: and [rax], al
+L_80000197: add [rax], al
+L_80000199: push 0x2000006
+L_8000019E: add [rax], al
+L_800001A0: add [rax], al
+L_800001A2: add [rax], al
+L_800001A4: add [rax], al
+L_800001A6: add [rax], al
+L_800001A8: add [rax], al
+L_800001AA: add [rax], al
+L_800001AC: and [rax], al
+L_800001AE: add [rax+0x2e], ah
+L_800001B1: jb 0x80000226
+L_800001B3: jb 0x80000218
+L_800001B5: add [rax], al
+L_800001B7: add ah, cl
+L_800001B9: add eax, [rax]
+L_800001BB: add [rax], al
+L_800001BD: mov al, [0x6a00000004000006]
+L_800001C6: db 0x6
+L_800001C7: add [rax], al
+L_800001C9: add [rax], al
+L_800001CB: add [rax], al
+L_800001CD: add [rax], al
+L_800001CF: add [rax], al
+L_800001D1: add [rax], al
+L_800001D3: add [rax+0x0], al
+L_800001D6: add [rax+0x0], al
+L_800001D9: add [rax], al
+L_800001DB: add [rax], al
+L_800001DD: add [rax], al
+L_800001DF: add [rax], al
+L_800001E1: add [rax], al
+L_800001E3: add [rax], al
+L_800001E5: add [rax], al
+L_800001E7: add [rax], al
+L_800001E9: add [rax], al
+L_800001EB: add [rax], al
+L_800001ED: add [rax], al
+L_800001EF: add [rax], al
+L_800001F1: add [rax], al
+L_800001F3: add [rax], al
+L_800001F5: add [rax], al
+L_800001F7: add [rax], al
+L_800001F9: add [rax], al
+L_800001FB: add [rax], al
+L_800001FD: add [rax], al
+L_800001FF: add [rax+0x0], cl
+L_80000202: add [rax], al
+L_80000204: add al, [rax]
+L_80000206: add eax, 0x23e9c00
+L_8000020B: add ah, bh
+L_8000020D: adc [rax+rax], eax
+L_80000210: add [rax], eax
+L_80000212: add [rax], al
+L_80000214: add [rax], al
+L_80000216: add [rax], al
+L_80000218: cwde
+L_80000219: push rax
+L_8000021A: db 0x6
+L_8000021B: add [rax+0x35], bl
+L_80000221: add [rax], al
+L_80000223: add [rax], al
+L_80000225: add [rax], al
+L_80000227: add [rax], al
+L_80000229: add [rax], al
+L_8000022B: add [rax], al
+L_8000022D: add [rax], al
+L_8000022F: add [rax], al
+L_80000231: add [rax], al
+L_80000233: add [rax], al
+L_80000235: add [rax], al
+L_80000237: add [rax], al
+L_80000239: add [rax], al
+L_8000023B: add [rax], al
+L_8000023D: add [rax], al
+L_8000023F: add [rax], al
+L_80000241: add [rax], al
+L_80000243: add [rax], al
+L_80000245: add [rax], al
+L_80000247: add [rsi], bl
+L_80000249: add bh, [rbx+0x3]
+L_8000024C: add [rax], al
+L_8000024E: add al, 0x2a
+L_80000250: and al, [rdx]
+L_80000252: add edi, [rbp+0x3]
+L_80000255: add [rax], al
+L_80000257: add al, 0x2a
+L_80000259: db 0x1e
+L_8000025A: add bh, [rbx+0x4]
+L_8000025D: add [rax], al
+L_8000025F: add al, 0x2a
+L_80000261: and al, [rdx]
+L_80000263: add edi, [rbp+0x4]
+L_80000266: add [rax], al
+L_80000268: add al, 0x2a
+L_8000026A: db 0x1e
+L_8000026B: add bh, [rbx+0x5]
+L_8000026E: add [rax], al
+L_80000270: add al, 0x2a
+L_80000272: and al, [rdx]
+L_80000274: add edi, [rbp+0x5]
+L_80000277: add [rax], al
+L_80000279: add al, 0x2a
+L_8000027B: db 0x1e
+L_8000027C: add bh, [rbx+0x6]
+L_8000027F: add [rax], al
+L_80000281: add al, 0x2a
+L_80000283: and al, [rdx]
+L_80000285: add edi, [rbp+0x6]
+L_80000288: add [rax], al
+L_8000028A: add al, 0x2a
+L_8000028C: db 0x1e
+L_8000028D: add bh, [rbx+0x7]
+L_80000290: add [rax], al
+L_80000292: add al, 0x2a
+L_80000294: and al, [rdx]
+L_80000296: add edi, [rbp+0x7]
+L_80000299: add [rax], al
+L_8000029B: add al, 0x2a
+L_8000029D: add [rax], al
+L_8000029F: add [rbx], dl
+L_800002A1: xor [rdi], al
+L_800002A3: add [rdi], ah
+L_800002A5: add al, [rax]
+L_800002A7: add [rcx], al
+L_800002A9: add [rax], al
+L_800002AB: adc [rbx-0x63], esi
+L_800002AE: db 0x7
+L_800002AF: add [rsi], al
+L_800002B1: or al, [rsi]
+L_800002B3: add bh, [rbp+0x7]
+L_800002B6: or al, 0x0
+L_800002B8: add al, 0x0
+L_800002BA: db 0x6
+L_800002BB: add ch, [rax]
+L_800002BD: db 0x26
+L_800002BE: add [rax], al
+L_800002C0: or dh, [rdx+rax+0x0]
+L_800002C4: add [rdx], al
+L_800002C6: jge 0x800002d0
+L_800002C8: or al, 0x0
+L_800002CA: add al, 0x6
+L_800002CC: jae 0x800002f5
+L_800002CE: add [rax], al
+L_800002D0: or ah, [rip+0x172]
+L_800002D6: jo 0x80000347
+L_800002D8: sub [rax], al
+L_800002DA: add [rdx], cl
+L_800002DC: add [rip+0x15e20], ah
+L_800002E2: add [rax], ah
+L_800002E4: ret 0x1
+L_800002E7: add [rbx+0x29], dh
+L_800002EA: add [rax], al
+L_800002EC: or ch, [rdi+0x2a]
+L_800002EF: add [rax], al
+L_800002F1: or al, [rax]
+L_800002F3: and eax, 0xfa20
+L_800002F8: add [rax], ah
+L_800002FA: cli
+L_800002FB: add [rax], al
+L_800002FD: add [rbx+0x29], dh
+L_80000300: add [rax], al
+L_80000302: or ch, [rdi+0x2b]
+L_80000305: add [rax], al
+L_80000307: or al, [rax]
+L_80000309: and eax, 0x2c6f16
+L_8000030E: add [rdx], cl
+L_80000310: add [rip+0x2d6f17], ah
+L_80000316: add [rdx], cl
+L_80000318: add [rip-0x3fffffde], ah
+L_8000031E: and al, [rax]
+L_80000321: add [rax+0x41], dl
+L_80000324: jae 0x80000354
+L_80000326: add [rax], al
+L_80000328: or ch, [rdi+0x2f]
+L_8000032B: add [rax], al
+L_8000032D: or al, [rax]
+L_8000032F: and eax, 0x306f16
+L_80000334: add [rdx], cl
+L_80000336: add [rip+0x316f1a], ah
+L_8000033C: add [rdx], cl
+L_8000033E: add [rip+0x326f], ah
+L_80000344: or dh, [rbx+0x33]
+L_80000347: add [rax], al
+L_80000349: or ah, [rip+0x3f72]
+L_8000034F: jo 0x800003c0
+L_80000351: xor al, 0x0
+L_80000353: add [rdx], cl
+L_80000355: add [rip+0x4d72], ah
+L_8000035B: jo 0x800003cc
+L_8000035D: sub [rax], al
+L_8000035F: add [rdx], cl
+L_80000361: add [rip+0x356f0a1f], ah
+L_80000367: add [rax], al
+L_80000369: or al, [rax]
+L_8000036B: and eax, 0x171f4b1f
+L_80000370: jae 0x8000039b
+L_80000372: add [rax], al
+L_80000374: or ch, [rdi+0x36]
+L_80000377: add [rax], al
+L_80000379: or al, [rax]
+L_8000037B: and eax, 0xd720
+L_80000380: add [rax], ah
+L_80000382: sahf
+L_80000383: add [rax], eax
+L_80000385: add [rbx+0x37], dh
+L_80000388: add [rax], al
+L_8000038A: or ch, [rdi+0x38]
+L_8000038D: add [rax], al
+L_8000038F: or al, [rax]
+L_80000391: and eax, 0x396f17
+L_80000396: add [rdx], cl
+L_80000398: add [rdi+0x3a], ch
+L_8000039B: add [rax], al
+L_8000039D: or al, [rax]
+L_8000039F: and eax, 0x326f
+L_800003A4: or dh, [rbx+0x33]
+L_800003A7: add [rax], al
+L_800003A9: or ah, [rip+0x5d72]
+L_800003AF: jo 0x80000420
+L_800003B1: xor al, 0x0
+L_800003B3: add [rdx], cl
+L_800003B5: add [rip+0x6372], ah
+L_800003BB: jo 0x8000042c
+L_800003BD: sub [rax], al
+L_800003BF: add [rdx], cl
+L_800003C1: add [rip+0x356f0a1f], ah
+L_800003C7: add [rax], al
+L_800003C9: or al, [rax]
+L_800003CB: and eax, 0x171f4b1f
+L_800003D0: jae 0x800003fb
+L_800003D2: add [rax], al
+L_800003D4: or ch, [rdi+0x36]
+L_800003D7: add [rax], al
+L_800003D9: or al, [rax]
+L_800003DB: and eax, -0x61dfbee1
+L_800003E0: add [rax], eax
+L_800003E2: add [rbx+0x37], dh
+L_800003E5: add [rax], al
+L_800003E7: or ch, [rdi+0x38]
+L_800003EA: add [rax], al
+L_800003EC: or al, [rax]
+L_800003EE: and eax, 0x396f17
+L_800003F3: add [rdx], cl
+L_800003F5: add [rdi+0x3a], ch
+L_800003F8: add [rax], al
+L_800003FA: or al, [rax]
+L_800003FC: and eax, 0x326f
+L_80000401: or dh, [rbx+0x3b]
+L_80000404: add [rax], al
+L_80000406: or ah, [rip+0x6b72]
+L_8000040C: jo 0x8000047d
+L_8000040E: xor al, 0x0
+L_80000410: add [rdx], cl
+L_80000412: add [rip+0x356f0f1f], ah
+L_80000418: add [rax], al
+L_8000041A: or al, [rax]
+L_8000041C: and eax, 0xd1f0d1f
+L_80000421: jae 0x8000045a
+L_80000423: add [rax], al
+L_80000425: or ch, [rdi+0x38]
+L_80000428: add [rax], al
+L_8000042A: or al, [rax]
+L_8000042C: and eax, 0x14420
+L_80000431: add [rax], ah
+L_80000433: mov eax, [rcx]
+L_80000435: add [rax], al
+L_80000437: jae 0x80000462
+L_80000439: add [rax], al
+L_8000043B: or ch, [rdi+0x36]
+L_8000043E: add [rax], al
+L_80000440: or al, [rax]
+L_80000442: and eax, 0x2d6f17
+L_80000447: add [rdx], cl
+L_80000449: add [rip-0x3fffffde], ah
+L_8000044F: and al, [rax]
+L_80000452: add [rax+0x41], dl
+L_80000455: jae 0x80000485
+L_80000457: add [rax], al
+L_80000459: or ch, [rdi+0x2f]
+L_8000045C: add [rax], al
+L_8000045E: or al, [rax]
+L_80000460: and eax, 0xc087b06
+L_80000465: add [rdi+rbp*2], al
+L_80000468: cmp al, 0x0
+L_8000046A: add [rdx], cl
+L_8000046C: add [rdi+0x3a], ch
+L_8000046F: add [rax], al
+L_80000471: or al, [rax]
+L_80000473: jge 0x8000047b
+L_80000475: or al, 0x0
+L_80000477: add al, 0x6
+L_80000479: jnp 0x80000481
+L_8000047B: or al, 0x0
+L_8000047D: add al, 0x6f
+L_8000047F: xor al, [rax]
+L_80000481: add [rdx], cl
+L_80000483: jb 0x800004c4
+L_80000485: add [rax], al
+L_80000487: jo 0x800004f8
+L_80000489: cmp eax, 0x60a0000
+L_8000048E: inc byte [rsi]
+L_80000490: sahf
+L_80000491: db 0x7
+L_80000492: add [rsi], al
+L_80000494: jae 0x800004d4
+L_80000496: add [rax], al
+L_80000498: or ch, [rdi+0x3f]
+L_8000049B: add [rax], al
+L_8000049D: or al, [rax]
+L_8000049F: db 0x6
+L_800004A0: jnp 0x800004a8
+L_800004A2: or al, 0x0
+L_800004A4: add al, 0x6f
+L_800004A6: xor al, [rax]
+L_800004A8: add [rdx], cl
+L_800004AA: jb 0x80000509
+L_800004AC: add [rax], al
+L_800004AE: jo 0x8000051f
+L_800004B0: cmp eax, 0x60a0000
+L_800004B5: inc byte [rsi]
+L_800004B7: lahf
+L_800004B8: db 0x7
+L_800004B9: add [rsi], al
+L_800004BB: jae 0x800004fb
+L_800004BD: add [rax], al
+L_800004BF: or ch, [rdi+0x3f]
+L_800004C2: add [rax], al
+L_800004C4: or al, [rax]
+L_800004C6: db 0x6
+L_800004C7: jnp 0x800004cf
+L_800004C9: or al, 0x0
+L_800004CB: add al, 0x6f
+L_800004CD: add [rax], al
+L_800004D0: or ah, [rsi]
+L_800004D2: sub ch, [rsi+0x37d1702]
+L_800004D8: add [rax], al
+L_800004DA: add al, 0x2
+L_800004DC: db 0x16
+L_800004DD: jge 0x800004e3
+L_800004DF: add [rax], al
+L_800004E1: add al, 0x2
+L_800004E3: db 0x16
+L_800004E4: jge 0x800004eb
+L_800004E6: add [rax], al
+L_800004E8: add al, 0x2
+L_800004EA: db 0x16
+L_800004EB: jge 0x800004f3
+L_800004ED: add [rax], al
+L_800004EF: add al, 0x2
+L_800004F1: db 0x16
+L_800004F2: jge 0x800004fb
+L_800004F4: add [rax], al
+L_800004F6: add al, 0x2
+L_800004F8: sub [rcx+0x0], al
+L_800004FB: add [rdx], cl
+L_800004FD: add [rdx], ch
+L_800004FF: add [rbx], dl
+L_80000501: xor [rbx], al
+L_80000503: add [rdx+0x2000000], bl
+L_80000509: add [rax], al
+L_8000050B: adc [rdx], eax
+L_8000050D: sub [rdx+0x0], al
+L_80000510: add [rdx], cl
+L_80000512: add [rax], al
+L_80000514: add al, [rbx]
+L_80000516: jge 0x80000521
+L_80000518: add [rax], al
+L_8000051A: add al, 0x2
+L_8000051C: add al, 0x7d
+L_8000051E: or al, [rax]
+L_80000520: add [rdx+rax], al
+L_80000523: jae 0x80000568
+L_80000525: add [rax], al
+L_80000527: or bh, [rbp+0x8]
+L_8000052A: add [rax], al
+L_8000052C: add al, 0x0
+L_8000052E: rol byte [rdx], 0x1
+L_80000530: add [rax], al
+L_80000532: add ch, [rax]
+L_80000534: add [rax], r8b
+L_80000537: or ch, [rax]
+L_80000539: add [r8], r8b
+L_8000053C: or cl, [rdx]
+L_8000053E: db 0x16
+L_8000053F: or ebp, [rbx]
+L_80000541: pop rbp
+L_80000542: db 0x6
+L_80000543: db 0x7
+L_80000544: db 0x9a
+L_80000545: or al, 0x0
+L_80000547: or [rbx], al
+L_80000549: adc al, 0x6f
+L_8000054B: add [rax], r8b
+L_8000054E: or cl, [rip+0x6f140408]
+L_80000554: add [rax], r8b
+L_80000557: or dl, [rbx]
+L_80000559: add al, 0x9
+L_8000055B: adc [rdi+rbp*2], eax
+L_8000055E: add [r8], r8b
+L_80000561: or dl, [rsi]
+L_80000563: inc byte [rcx]
+L_80000565: adc eax, [rip+0x2f2c0511]
+L_8000056B: add [rdx+0x75], dh
+L_8000056E: add [rax], al
+L_80000570: jo 0x8000057a
+L_80000572: outsd
+L_80000573: add [rax], al
+L_80000576: or dh, [rdx-0x79]
+L_80000579: add [rax], al
+L_8000057B: jo 0x800005a5
+L_8000057D: add [r8], al
+L_80000580: or ch, [rax]
+L_80000582: add [rax], al
+L_80000585: or al, [rax]
+L_80000587: add ch, [rax]
+L_80000589: db 0xe
+L_8000058A: add [rax], al
+L_8000058C: db 0x6
+L_8000058D: or [rdi+0x48], ch
+L_80000590: add [rax], al
+L_80000592: or ch, [rdi+0x4b]
+L_80000595: add [rax], al
+L_80000597: or ah, [rsi]
+L_80000599: add [rax], al
+L_8000059B: db 0x7
+L_8000059C: db 0x17
+L_8000059D: pop rax
+L_8000059E: or eax, [rdi]
+L_800005A0: db 0x6
+L_800005A1: mov gs, [rcx+0x32]
+L_800005A4: popf
+L_800005A5: sub bl, [rsi]
+L_800005A7: add bh, [rbx+0x8]
+L_800005AA: add [rax], al
+L_800005AC: add al, 0x2a
+L_800005AE: db 0x1e
+L_800005AF: add bh, [rbx+0x9]
+L_800005B2: add [rax], al
+L_800005B4: add al, 0x2a
+L_800005B6: db 0x1e
+L_800005B7: add bh, [rbx+0xa]
+L_800005BA: add [rax], al
+L_800005BC: add al, 0x2a
+L_800005BE: xchg edx, eax
+L_800005BF: db 0x1f
+L_800005C0: db 0xfe
+L_800005C1: jae 0x8000060f
+L_800005C3: add [rax], al
+L_800005C5: or ah, [rip+0x4d7d02]
+L_800005CB: add [rdx], cl
+L_800005CD: and eax, 0x4e7d03
+L_800005D2: add [rdx], cl
+L_800005D4: and eax, 0x4f7d04
+L_800005D9: add [rdx], cl
+L_800005DB: and eax, 0x507d05
+L_800005E0: add [rdx], cl
+L_800005E2: sub bl, [rdx+0x1f]
+L_800005E5: db 0xfe
+L_800005E6: jae 0x80000639
+L_800005E8: add [rax], al
+L_800005EA: or ah, [rip+0x527d02]
+L_800005F0: add [rdx], cl
+L_800005F2: and eax, 0x537d03
+L_800005F7: add [rdx], cl
+L_800005F9: sub bl, [rdx+0x1f]
+L_800005FC: db 0xfe
+L_800005FD: jae 0x80000653
+L_800005FF: add [rax], al
+L_80000601: or ah, [rip+0x557d02]
+L_80000607: add [rdx], cl
+L_80000609: and eax, 0x567d03
+L_8000060E: add [rdx], cl
+L_80000610: sub al, [rax]
+L_80000612: add [rax], al
+L_80000614: adc esi, [rax]
+L_80000616: add al, 0x0
+L_80000618: ret
+L_80000619: add eax, [rax]
+L_8000061B: add [rbx], al
+L_8000061D: add [rax], al
+L_8000061F: adc [rax], eax
+L_80000621: add dh, [rbp+0xa]
+L_80000624: add [rax], al
+L_80000626: sbb edx, [rsi+rdi*8]
+L_80000629: add ecx, [rdx]
+L_8000062B: add dh, [rbp+0xb]
+L_8000062E: add [rax], al
+L_80000630: sbb edx, [rsi+rdi*8]
+L_80000633: add ecx, [rbx]
+L_80000635: db 0x6
+L_80000636: sub eax, 0xb2d071b
+L_8000063B: jb 0x800005de
+L_8000063D: add [rax], al
+L_8000063F: jo 0x800006b4
+L_80000641: push rdi
+L_80000642: add [rax], al
+L_80000644: or bh, [rdx+0x2]
+L_80000647: jz 0x80000654
+L_80000649: add [rax], al
+L_8000064B: sbb ebp, [rdi+0x58]
+L_8000064E: add [rax], al
+L_80000650: or ch, [rbx]
+L_80000652: or [rdx], al
+L_80000654: jz 0x80000660
+L_80000656: add [rax], al
+L_80000658: sbb ecx, [rsi+0x13080c69]
+L_8000065E: add al, 0xf
+L_80000660: add ebp, [rax]
+L_80000662: pop rcx
+L_80000663: add [rax], al
+L_80000665: or ch, [rsp+rcx]
+L_80000668: lsl ebp, [rax]
+L_8000066B: pop rdx
+L_8000066C: add [rax], al
+L_8000066E: or dl, [rsi]
+L_80000670: inc byte [rcx]
+L_80000672: sub eax, [rcx]
+L_80000674: db 0x16
+L_80000675: adc ecx, [rcx]
+L_80000677: adc [rcx], ecx
+L_80000679: sub al, 0xc
+L_8000067B: add [rdx+0x74], dh
+L_8000067E: add [rax], eax
+L_80000680: jo 0x800006f5
+L_80000682: pop rbx
+L_80000683: add [rax], al
+L_80000685: or bh, [rdx+0x8]
+L_80000688: db 0x16
+L_80000689: inc byte [rdx]
+L_8000068B: db 0x16
+L_8000068C: inc byte [rcx]
+L_8000068E: adc ecx, [rdx]
+L_80000690: adc [rdx], ecx
+L_80000692: sub al, 0x21
+L_80000694: add [rsi], al
+L_80000696: adc ecx, [rbx]
+L_80000698: adc [rbx], ecx
+L_8000069A: sub al, 0xd
+L_8000069C: db 0x16
+L_8000069D: lea ecx, [rip+0x131b0000]
+L_800006A3: or al, 0x38
+L_800006A5: db 0x37
+L_800006A6: add eax, [rax]
+L_800006A8: add [rbx+0x5c], dh
+L_800006AB: add [rax], al
+L_800006AD: or dl, [rbx]
+L_800006AF: or al, 0x38
+L_800006B1: sub eax, [rbx]
+L_800006B3: add [rax], al
+L_800006B5: db 0xf
+L_800006B6: add [rax], ebp
+L_800006B8: pop rcx
+L_800006B9: add [rax], al
+L_800006BB: or ch, [rdx+0x5928020f]
+L_800006C2: add [rax], al
+L_800006C4: or ch, [rip+0x28030f0c]
+L_800006CA: pop rcx
+L_800006CB: add [rax], al
+L_800006CD: or dl, [rsi]
+L_800006CF: inc byte [rcx]
+L_800006D1: sub eax, [rcx]
+L_800006D3: db 0x16
+L_800006D4: adc ecx, [rip+0x212c0d11]
+L_800006DA: add [rdi], cl
+L_800006DC: add [rax], ebp
+L_800006DE: pop rdx
+L_800006DF: add [rax], al
+L_800006E1: or dl, [rbx]
+L_800006E3: db 0xe
+L_800006E4: lar edx, [rcx]
+L_800006E7: db 0xe
+L_800006E8: sub [rbp+0x0], bl
+L_800006EB: add [rdx], cl
+L_800006ED: lgdt qword [rsi]
+L_800006F0: sub [rbp+0x0], bl
+L_800006F3: add [rdx], cl
+L_800006F5: add [rax], bh
+L_800006F7: outsd
+L_800006F8: add [rax], eax
+L_800006FA: add [rdi], cl
+L_800006FC: add ebp, [rax]
+L_800006FE: pop rcx
+L_800006FF: add [rax], al
+L_80000701: or ch, [rsp+rcx]
+L_80000704: lsl ebp, [rax]
+L_80000707: pop rdx
+L_80000708: add [rax], al
+L_8000070A: or dl, [rsi]
+L_8000070C: inc byte [rdx]
+L_8000070E: sub eax, [rcx]
+L_80000710: db 0x17
+L_80000711: adc ecx, [rdi]
+L_80000713: adc [rdi], ecx
+L_80000715: sub al, 0x3b
+L_80000717: add [rdi], cl
+L_80000719: add [rax], ebp
+L_8000071B: pop rcx
+L_8000071C: add [rax], al
+L_8000071E: or dl, [rsi]
+L_80000720: inc byte [rcx]
+L_80000722: adc edx, [rax]
+L_80000724: adc [rax], edx
+L_80000726: sub al, 0xa
+L_80000728: add [rdi], cl
+L_8000072A: add [rsi], edx
+L_8000072C: sub [rbp+0x0], bl
+L_8000072F: add [rdx], cl
+L_80000731: add [rdi], cl
+L_80000733: add ch, [rax]
+L_80000735: pop rcx
+L_80000736: add [rax], al
+L_80000738: or dl, [rsi]
+L_8000073A: inc byte [rcx]
+L_8000073C: adc edx, [rcx]
+L_8000073E: adc [rcx], edx
+L_80000740: sub al, 0xa
+L_80000742: add [rdi], cl
+L_80000744: add cl, [rax]
+L_80000746: sub [rbp+0x0], bl
+L_80000749: add [rdx], cl
+L_8000074B: add [rax], al
+L_8000074D: cmp [rax], bl
+L_8000074F: add [rax], eax
+L_80000751: add [rax], al
+L_80000753: db 0xf
+L_80000754: add [rax], ebp
+L_80000756: pop rcx
+L_80000757: add [rax], al
+L_80000759: or dl, [rsi]
+L_8000075B: inc byte [rcx]
+L_8000075D: adc edx, [rdx]
+L_8000075F: adc [rdx], edx
+L_80000761: sub al, 0xc
+L_80000763: add [rdi], cl
+L_80000765: add [rax], ecx
+L_80000767: db 0x17
+L_80000768: pop rcx
+L_80000769: sub [rbp+0x0], bl
+L_8000076C: add [rdx], cl
+L_8000076E: add [rdi], cl
+L_80000770: add ch, [rax]
+L_80000772: pop rcx
+L_80000773: add [rax], al
+L_80000775: or dl, [rsi]
+L_80000777: inc byte [rcx]
+L_80000779: adc edx, [rbx]
+L_8000077B: adc [rbx], edx
+L_8000077D: cmp edi, esp
+L_8000077F: add [rax], al
+L_80000781: add [rax], al
+L_80000783: lsl ebp, [rax]
+L_80000786: pop rdx
+L_80000787: add [rax], al
+L_80000789: or dl, [rbx]
+L_8000078B: or [rdi], cl
+L_8000078D: add [rax], ebp
+L_8000078F: pop rdx
+L_80000790: add [rax], al
+L_80000792: or dl, [rbx]
+L_80000794: db 0x6
+L_80000795: adc eax, 0x6110713
+L_8000079A: db 0x16
+L_8000079B: inc byte [rbx+rdx]
+L_8000079E: adc al, 0x11
+L_800007A0: adc al, 0x2c
+L_800007A2: or al, [rax]
+L_800007A4: adc [rsi], eax
+L_800007A6: or [rax+0x13], bl
+L_800007A9: db 0x6
+L_800007AA: add [rbx], ch
+L_800007AC: adc eax, -0x1f7f9ef
+L_800007B1: add al, 0x16
+L_800007B3: inc byte [rcx]
+L_800007B5: adc edx, [rip+0x72c1511]
+L_800007BB: add [rax], cl
+L_800007BD: db 0x17
+L_800007BE: pop rcx
+L_800007BF: adc eax, [rsi]
+L_800007C1: add [rcx], dl
+L_800007C3: db 0x7
+L_800007C4: adc [rsi], eax
+L_800007C6: pop rcx
+L_800007C7: adc [rax], ecx
+L_800007C9: pop rbx
+L_800007CA: db 0x17
+L_800007CB: pop rax
+L_800007CC: or al, 0x8
+L_800007CE: adc [rax], ecx
+L_800007D0: pop rbp
+L_800007D1: db 0x16
+L_800007D2: inc byte [rcx]
+L_800007D4: adc edx, [rsi]
+L_800007D6: adc [rsi], edx
+L_800007D8: sub al, 0x6
+L_800007DA: add [rax], cl
+L_800007DC: db 0x17
+L_800007DD: pop rcx
+L_800007DE: or al, 0x0
+L_800007E0: db 0x6
+L_800007E1: adc edx, [rdi]
+L_800007E3: adc [rdi], edx
+L_800007E5: sub al, 0x43
+L_800007E7: add [rax], cl
+L_800007E9: lea ecx, [rip+0xd1b0000]
+L_800007EF: db 0x16
+L_800007F0: adc ebx, [rax]
+L_800007F2: sub esp, [rbx]
+L_800007F4: add [rcx], dl
+L_800007F6: db 0x6
+L_800007F7: adc [rax], ebx
+L_800007F9: adc [rax], ecx
+L_800007FB: pop rdx
+L_800007FC: pop rax
+L_800007FD: adc eax, [rip+0x2181109]
+L_80000803: adc [rip+0x5e6f], eax
+L_80000809: or ch, [rdi+0x5f]
+L_8000080C: add [rax], al
+L_8000080E: or al, [rax]
+L_80000810: add [rcx], dl
+L_80000812: sbb [rdi], dl
+L_80000814: pop rax
+L_80000815: adc ebx, [rax]
+L_80000817: adc [rax], ebx
+L_80000819: or dh, bh
+L_8000081B: add al, 0x13
+L_8000081D: sbb [rcx], edx
+L_8000081F: sbb [rip+0xc1309d2], ebp
+L_80000825: cmp [rsi+0x73000001], dh
+L_8000082B: pop rsp
+L_8000082C: add [rax], al
+L_8000082E: or cl, [rip+0x2b1a1316]
+L_80000834: and [rax], eax
+L_80000836: adc [rsi], eax
+L_80000838: adc [rdx], ebx
+L_8000083A: adc [rax], ecx
+L_8000083C: pop rdx
+L_8000083D: pop rax
+L_8000083E: adc eax, [rip+0x5110209]
+L_80000844: outsd
+L_80000845: pop rsi
+L_80000846: add [rax], al
+L_80000848: or ch, [rdi+0x60]
+L_8000084B: add [rax], al
+L_8000084D: or al, [rax]
+L_8000084F: add [rcx], dl
+L_80000851: sbb dl, [rdi]
+L_80000853: pop rax
+L_80000854: adc ebx, [rdx]
+L_80000856: adc [rdx], ebx
+L_80000858: or dh, bh
+L_8000085A: add al, 0x13
+L_8000085C: sbb edx, [rcx]
+L_8000085E: sbb ebp, [rip+0xc1309d4]
+L_80000864: cmp [rdi+0x1], dh
+L_80000867: add [rax], al
+L_80000869: add [rdi], cl
+L_8000086B: add ch, [rax]
+L_8000086D: pop rdx
+L_8000086E: add [rax], al
+L_80000870: or dl, [rsi]
+L_80000872: xor dl, [rsi]
+L_80000874: lar ebp, [rax]
+L_80000877: pop rdx
+L_80000878: add [rax], al
+L_8000087A: or cl, [rax]
+L_8000087C: xor [rcx], cl
+L_8000087E: lar ebp, [rax]
+L_80000881: pop rdx
+L_80000882: add [rax], al
+L_80000884: or ch, [rbx]
+L_80000886: add [rax], ecx
+L_80000888: sub ecx, [rcx]
+L_8000088A: or [rdi], cl
+L_8000088C: add ch, [rax]
+L_8000088E: pop rdx
+L_8000088F: add [rax], al
+L_80000891: or bl, [rax+0x13]
+L_80000894: db 0x7
+L_80000895: db 0xf
+L_80000896: add [rax], ebp
+L_80000898: pop rdx
+L_80000899: add [rax], al
+L_8000089B: or dl, [rsi]
+L_8000089D: xor cl, [rcx]
+L_8000089F: db 0xf
+L_800008A0: add [rax], ebp
+L_800008A2: pop rdx
+L_800008A3: add [rax], al
+L_800008A5: or ch, [rbx]
+L_800008A7: or [rax], ecx
+L_800008A9: db 0xf
+L_800008AA: add [rax], ebp
+L_800008AC: pop rdx
+L_800008AD: add [rax], al
+L_800008AF: or bl, [rax+0x13]
+L_800008B2: db 0x6
+L_800008B3: lsl ebp, [rax]
+L_800008B6: pop rcx
+L_800008B7: add [rax], al
+L_800008B9: or ch, [rcx+rcx]
+L_800008BC: lsl ebp, [rax]
+L_800008BF: pop rdx
+L_800008C0: add [rax], al
+L_800008C2: or ch, [rbx]
+L_800008C4: add [rdi], edx
+L_800008C6: adc ecx, [rax]
+L_800008C8: adc [rsi], eax
+L_800008CA: or [rdx], dh
+L_800008CC: or dl, [rcx]
+L_800008CE: db 0x7
+L_800008CF: or dh, bh
+L_800008D1: add al, 0x16
+L_800008D3: inc byte [rcx]
+L_800008D5: sub eax, [rcx]
+L_800008D7: db 0x16
+L_800008D8: adc ebx, [rcx+rdx]
+L_800008DB: sbb al, 0x2c
+L_800008DD: and [rax], eax
+L_800008DF: db 0x6
+L_800008E0: adc ebx, [rip+0xd2c1d11]
+L_800008E6: db 0x16
+L_800008E7: lea ecx, [rip+0x131b0000]
+L_800008ED: or al, 0x38
+L_800008EF: in eax, dx
+L_800008F0: add [rax], al
+L_800008F2: add [rbx+0x5c], dh
+L_800008F5: add [rax], al
+L_800008F7: or dl, [rbx]
+L_800008F9: or al, 0x38
+L_800008FB: loopz 0x800008fd
+L_800008FD: add [rax], al
+L_800008FF: adc [rdi], eax
+L_80000901: adc [rsi], eax
+L_80000903: pop rcx
+L_80000904: adc [rax], ecx
+L_80000906: sub [rcx+0x0], ah
+L_80000909: add [rdx], cl
+L_8000090B: pop rbp
+L_8000090C: db 0x16
+L_8000090D: inc byte [rcx]
+L_8000090F: adc ebx, [rsi]
+L_80000911: adc [rsi], ebx
+L_80000913: sub al, 0xd
+L_80000915: add [rcx], dl
+L_80000917: db 0x7
+L_80000918: adc [rsi], eax
+L_8000091A: pop rcx
+L_8000091B: adc [rax], ecx
+L_8000091D: pop rbx
+L_8000091E: or al, 0x0
+L_80000920: sub ecx, [rip+0x11071100]
+L_80000926: db 0x6
+L_80000927: pop rcx
+L_80000928: adc [rax], ecx
+L_8000092A: pop rbx
+L_8000092B: db 0x17
+L_8000092C: pop rax
+L_8000092D: or al, 0x0
+L_8000092F: or [rsi], dl
+L_80000931: inc byte [rdx]
+L_80000933: db 0x16
+L_80000934: inc byte [rcx]
+L_80000936: adc ebx, [rdi]
+L_80000938: adc [rdi], ebx
+L_8000093A: sub al, 0x21
+L_8000093C: add [rsi], al
+L_8000093E: adc esp, [rax]
+L_80000940: adc [rax], esp
+L_80000942: sub al, 0xd
+L_80000944: db 0x16
+L_80000945: lea ecx, [rip+0x131b0000]
+L_8000094B: or al, 0x38
+L_8000094D: pop qword [rax]
+L_8000094F: add [rax], al
+L_80000951: jae 0x800009af
+L_80000953: add [rax], al
+L_80000955: or dl, [rbx]
+L_80000957: or al, 0x38
+L_80000959: add dword [rax], 0x0
+L_8000095C: add [rsi], al
+L_8000095E: adc esp, [rcx]
+L_80000960: adc [rcx], esp
+L_80000962: sub al, 0x40
+L_80000964: add [rax], cl
+L_80000966: lea ecx, [rip+0xd1b0000]
+L_8000096C: db 0x16
+L_8000096D: adc esp, [rdx]
+L_8000096F: sub esp, [rbx]
+L_80000971: add [rcx], dl
+L_80000973: db 0x6
+L_80000974: adc [rdx], esp
+L_80000976: adc [rax], ecx
+L_80000978: pop rdx
+L_80000979: pop rax
+L_8000097A: adc eax, [rip+0x2221109]
+L_80000980: adc [rip+0x5e6f], eax
+L_80000986: or ch, [rdi+0x5f]
+L_80000989: add [rax], al
+L_8000098B: or al, [rax]
+L_8000098D: add [rcx], dl
+L_8000098F: and dl, [rdi]
+L_80000991: pop rax
+L_80000992: adc esp, [rdx]
+L_80000994: adc [rdx], esp
+L_80000996: or dh, bh
+L_80000998: add al, 0x13
+L_8000099A: and edx, [rcx]
+L_8000099C: and ebp, [rip+0xc1309d2]
+L_800009A2: sub edi, [rbx+rsi*2]
+L_800009A5: pop rsp
+L_800009A6: add [rax], al
+L_800009A8: or cl, [rip+0x2b241316]
+L_800009AE: and [rax], eax
+L_800009B0: adc [rsi], eax
+L_800009B2: adc [rcx+rdx], esp
+L_800009B5: or [rdx+0x58], bl
+L_800009B8: adc eax, [rip+0x5110209]
+L_800009BE: outsd
+L_800009BF: pop rsi
+L_800009C0: add [rax], al
+L_800009C2: or ch, [rdi+0x60]
+L_800009C5: add [rax], al
+L_800009C7: or al, [rax]
+L_800009C9: add [rcx], dl
+L_800009CB: and al, 0x17
+L_800009CD: pop rax
+L_800009CE: adc esp, [rcx+rdx]
+L_800009D1: and al, 0x8
+L_800009D3: inc byte [rbx+rdx]
+L_800009D6: and eax, -0x2bd2daef
+L_800009DB: or [rbx], edx
+L_800009DD: or al, 0x2b
+L_800009DF: add [rcx], dl
+L_800009E1: or al, 0x2a
+L_800009E3: add [rbx], dl
+L_800009E5: xor [rip+0x15d00], al
+L_800009EB: add [rax+rax], al
+L_800009EE: add [rcx], dl
+L_800009F0: add [rbx], al
+L_800009F2: db 0x17
+L_800009F3: lea edi, [rdx+0x0]
+L_800009F6: add [rcx], al
+L_800009F8: and eax, -0x62c5e0ea
+L_800009FD: outsd
+L_800009FE: db 0x62
+L_800009FF: add [rax], al
+L_80000A01: or cl, [rdx]
+L_80000A03: adc al, [rcx]
+L_80000A05: db 0x16
+L_80000A06: sub [rbp+0x0], bl
+L_80000A09: add [rdx], cl
+L_80000A0B: adc al, [rdx]
+L_80000A0D: db 0x16
+L_80000A0E: sub [rbp+0x0], bl
+L_80000A11: add [rdx], cl
+L_80000A13: adc al, [rbx]
+L_80000A15: db 0x17
+L_80000A16: sub [rbp+0x0], bl
+L_80000A19: add [rdx], cl
+L_80000A1B: db 0x6
+L_80000A1C: mov gs, [rcx+0x13]
+L_80000A1F: add eax, 0x4130511
+L_80000A24: adc [rdi+rdx], eax
+L_80000A27: pop rcx
+L_80000A28: add r8d, [r8]
+L_80000A2B: add [rax], al
+L_80000A2D: add eax, 0x21000000
+L_80000A32: add [rax], al
+L_80000A34: add [rsi+0x0], bh
+L_80000A37: add [rax], al
+L_80000A39: cmp bh, bh
+L_80000A3B: add [rax], al
+L_80000A3D: add [rdx], dl
+L_80000A3F: add [rsi], edx
+L_80000A41: sub [rbp+0x0], bl
+L_80000A44: add [rdx], cl
+L_80000A46: adc al, [rdx]
+L_80000A48: db 0x6
+L_80000A49: db 0x16
+L_80000A4A: db 0x9a
+L_80000A4B: sub [rbx+0x0], ah
+L_80000A4E: add [rdx], cl
+L_80000A50: sub [rbp+0x0], bl
+L_80000A53: add [rdx], cl
+L_80000A55: cmp bl, ah
+L_80000A57: add [rax], al
+L_80000A59: add [rsi], al
+L_80000A5B: db 0x16
+L_80000A5C: db 0x9a
+L_80000A5D: jb 0x80000a49
+L_80000A5F: add [rax], eax
+L_80000A61: jo 0x80000a8b
+L_80000A63: add fs:[rax], al
+L_80000A66: or dl, [rbx]
+L_80000A68: db 0x6
+L_80000A69: adc [rsi], eax
+L_80000A6B: sub al, 0xa
+L_80000A6D: adc al, [rcx]
+L_80000A6F: db 0xfe
+L_80000A70: adc eax, 0x1b00000c
+L_80000A75: sub ecx, [rdi]
+L_80000A77: adc al, [rcx]
+L_80000A79: db 0x6
+L_80000A7A: db 0x16
+L_80000A7B: db 0x9a
+L_80000A7C: sub [rbx+0x0], ah
+L_80000A7F: add [rdx], cl
+L_80000A81: sub [rbp+0x0], bl
+L_80000A84: add [rdx], cl
+L_80000A86: db 0x6
+L_80000A87: db 0x17
+L_80000A88: db 0x9a
+L_80000A89: jb 0x80000a75
+L_80000A8B: add [rax], eax
+L_80000A8D: jo 0x80000ab7
+L_80000A8F: add fs:[rax], al
+L_80000A92: or dl, [rbx]
+L_80000A94: db 0x7
+L_80000A95: adc [rdi], eax
+L_80000A97: sub al, 0xa
+L_80000A99: adc al, [rdx]
+L_80000A9B: db 0xfe
+L_80000A9C: adc eax, 0x1b00000c
+L_80000AA1: sub ecx, [rdi]
+L_80000AA3: adc al, [rdx]
+L_80000AA5: db 0x6
+L_80000AA6: db 0x17
+L_80000AA7: db 0x9a
+L_80000AA8: sub [rbx+0x0], ah
+L_80000AAB: add [rdx], cl
+L_80000AAD: sub [rbp+0x0], bl
+L_80000AB0: add [rdx], cl
+L_80000AB2: cmp [rsi+0x6000000], al
+L_80000AB8: db 0x16
+L_80000AB9: db 0x9a
+L_80000ABA: jb 0x80000aa6
+L_80000ABC: add [rax], eax
+L_80000ABE: jo 0x80000ae8
+L_80000AC0: add fs:[rax], al
+L_80000AC3: or dl, [rbx]
+L_80000AC5: or [rcx], dl
+L_80000AC7: or [rdx+rcx], ch
+L_80000ACA: adc al, [rcx]
+L_80000ACC: db 0xfe
+L_80000ACD: adc eax, 0x1b00000c
+L_80000AD2: sub ecx, [rdi]
+L_80000AD4: adc al, [rcx]
+L_80000AD6: db 0x6
+L_80000AD7: db 0x16
+L_80000AD8: db 0x9a
+L_80000AD9: sub [rbx+0x0], ah
+L_80000ADC: add [rdx], cl
+L_80000ADE: sub [rbp+0x0], bl
+L_80000AE1: add [rdx], cl
+L_80000AE3: db 0x6
+L_80000AE4: db 0x17
+L_80000AE5: db 0x9a
+L_80000AE6: jb 0x80000ad2
+L_80000AE8: add [rax], eax
+L_80000AEA: jo 0x80000b14
+L_80000AEC: add fs:[rax], al
+L_80000AEF: or dl, [rbx]
+L_80000AF1: or [rcx], edx
+L_80000AF3: or [rdx+rcx], ebp
+L_80000AF6: adc al, [rdx]
+L_80000AF8: db 0xfe
+L_80000AF9: adc eax, 0x1b00000c
+L_80000AFE: sub ecx, [rdi]
+L_80000B00: adc al, [rdx]
+L_80000B02: db 0x6
+L_80000B03: db 0x17
+L_80000B04: db 0x9a
+L_80000B05: sub [rbx+0x0], ah
+L_80000B08: add [rdx], cl
+L_80000B0A: sub [rbp+0x0], bl
+L_80000B0D: add [rdx], cl
+L_80000B0F: db 0x6
+L_80000B10: sbb [rdx+0x1ea72], bl
+L_80000B16: jo 0x80000b40
+L_80000B18: add fs:[rax], al
+L_80000B1B: or dl, [rbx]
+L_80000B1D: or dl, [rcx]
+L_80000B1F: or ch, [rdx+rcx]
+L_80000B22: adc al, [rbx]
+L_80000B24: db 0xfe
+L_80000B25: adc eax, 0x1b00000c
+L_80000B2A: sub ecx, [rdi]
+L_80000B2C: adc al, [rbx]
+L_80000B2E: db 0x6
+L_80000B2F: sbb [rdx+0x6328], bl
+L_80000B35: or ch, [rax]
+L_80000B37: pop rbp
+L_80000B38: add [rax], al
+L_80000B3A: or ch, [rbx]
+L_80000B3C: add [rdx], al
+L_80000B3E: db 0x7
+L_80000B3F: or [rcx], cl
+L_80000B41: sub [rcx], al
+L_80000B43: add [rax], al
+L_80000B45: sub edx, [rbx]
+L_80000B47: or ebp, [rbx]
+L_80000B49: add [rcx], dl
+L_80000B4B: or ebp, [rdx]
+L_80000B4D: add [rax], al
+L_80000B4F: add [rbx], dl
+L_80000B51: xor [rax+rax], al
+L_80000B54: test [rax], eax
+L_80000B56: add [rax], al
+L_80000B58: add eax, 0x110000
+L_80000B5D: adc al, [rax]
+L_80000B5F: db 0x16
+L_80000B60: sub [rbp+0x0], bl
+L_80000B63: add [rdx], cl
+L_80000B65: adc al, [rcx]
+L_80000B67: db 0x16
+L_80000B68: sub [rbp+0x0], bl
+L_80000B6B: add [rdx], cl
+L_80000B6D: adc al, [rdx]
+L_80000B6F: db 0x17
+L_80000B70: sub [rbp+0x0], bl
+L_80000B73: add [rdx], cl
+L_80000B75: add ecx, [rsi+0x11041369]
+L_80000B7B: add al, 0xd
+L_80000B7D: or [rdi], edx
+L_80000B7F: pop rcx
+L_80000B80: add r8d, [r8]
+L_80000B83: add [rax], al
+L_80000B85: add al, [rax]
+L_80000B87: add [rax], al
+L_80000B89: adc al, 0x0
+L_80000B8B: add [rax], al
+L_80000B8D: db 0x26
+L_80000B8E: add [rax], al
+L_80000B90: add [rbx], ch
+L_80000B92: db 0x3e
+L_80000B93: adc al, [rax]
+L_80000B95: db 0x16
+L_80000B96: sub [rbp+0x0], bl
+L_80000B99: add [rdx], cl
+L_80000B9B: add edx, [rsi]
+L_80000B9D: mov [0x32c2b0b1b00000c], eax
+L_80000BA6: db 0x16
+L_80000BA7: mov [0xa317030a1b00000c], eax
+L_80000BB0: or al, 0x0
+L_80000BB2: add [rbx], bl
+L_80000BB4: or ebp, [rbx]
+L_80000BB6: sbb al, [rbx]
+L_80000BB8: db 0x16
+L_80000BB9: mov [0xa317030a1b00000c], eax
+L_80000BC2: or al, 0x0
+L_80000BC4: add [rbx], bl
+L_80000BC6: or eax, [rbx]
+L_80000BC8: sbb [rbx+0x1b00000c], ah
+L_80000BCE: or al, 0x2b
+L_80000BD0: add [rdx], al
+L_80000BD2: db 0x6
+L_80000BD3: db 0x7
+L_80000BD4: or [rax], ch
+L_80000BD6: add [rax], eax
+L_80000BD8: add [rbx], ch
+L_80000BDA: adc eax, [rip+0x511002b]
+L_80000BE0: sub al, [rax]
+L_80000BE2: add [rax], al
+L_80000BE4: adc esi, [rax]
+L_80000BE6: add al, [rax]
+L_80000BE8: sbb al, 0x0
+L_80000BEA: add [rax], al
+L_80000BEC: db 0x6
+L_80000BED: add [rax], al
+L_80000BEF: adc [rax], eax
+L_80000BF1: add ch, [rdi+0x65]
+L_80000BF4: add [rax], al
+L_80000BF6: or al, [rbx]
+L_80000BF8: sub [rdx], al
+L_80000BFA: add [rax], al
+L_80000BFC: sub esi, [rax+rdx+0x0]
+L_80000C00: add [rbx], bl
+L_80000C02: jae 0x80000c6a
+L_80000C04: add [rax], al
+L_80000C06: or cl, [rdx]
+L_80000C08: sub eax, [rax]
+L_80000C0A: db 0x6
+L_80000C0B: sub dl, [rbx]
+L_80000C0D: xor [rax+rax], al
+L_80000C10: sub eax, 0x6000000
+L_80000C15: add [rax], al
+L_80000C17: adc [rax], eax
+L_80000C19: add ch, [rdi+0x65]
+L_80000C1C: add [rax], al
+L_80000C1E: or al, [rbx]
+L_80000C20: jae 0x80000c7f
+L_80000C22: add [rax], al
+L_80000C24: or al, [rbx+rsi*2]
+L_80000C27: pop rbp
+L_80000C28: add [rax], al
+L_80000C2A: or al, [rip+0x5d73]
+L_80000C30: or ch, [rax]
+L_80000C32: add eax, [rax]
+L_80000C34: add [rbx], ch
+L_80000C36: jz 0x80000c48
+L_80000C38: add [rax], al
+L_80000C3A: sbb esi, [rbx+0x66]
+L_80000C3D: add [rax], al
+L_80000C3F: or cl, [rdx]
+L_80000C41: sub eax, [rax]
+L_80000C43: db 0x6
+L_80000C44: sub al, [rax]
+L_80000C46: add [rax], al
+L_80000C48: adc esi, [rax]
+L_80000C4A: add al, [rax]
+L_80000C4C: sbb al, 0x0
+L_80000C4E: add [rax], al
+L_80000C50: db 0x6
+L_80000C51: add [rax], al
+L_80000C53: adc [rax], eax
+L_80000C55: add ch, [rdi+0x65]
+L_80000C58: add [rax], al
+L_80000C5A: or al, [rbx]
+L_80000C5C: sub [rax+rax], al
+L_80000C5F: add [rbx], ch
+L_80000C61: jz 0x80000c73
+L_80000C63: add [rax], al
+L_80000C65: sbb esi, [rbx+0x66]
+L_80000C68: add [rax], al
+L_80000C6A: or cl, [rdx]
+L_80000C6C: sub eax, [rax]
+L_80000C6E: db 0x6
+L_80000C6F: sub al, [rdx+0x0]
+L_80000C72: add al, [rbx]
+L_80000C74: jge 0x80000c81
+L_80000C76: add [rax], al
+L_80000C78: add al, 0x2
+L_80000C7A: add al, 0x7d
+L_80000C7C: or al, 0x0
+L_80000C7E: add [rdx+rbp], al
+L_80000C81: jp 0x80000c85
+L_80000C83: sub [rcx+0x0], al
+L_80000C86: add [rdx], cl
+L_80000C88: add [rax], al
+L_80000C8A: add al, [rip+0x247d]
+L_80000C90: add al, 0x2
+L_80000C92: add al, 0x7d
+L_80000C94: and eax, [rax]
+L_80000C96: add [rdx+rax], al
+L_80000C99: add edi, [rbp+0x22]
+L_80000C9C: add [rax], al
+L_80000C9E: add al, 0x2a
+L_80000CA0: adc esi, [rax]
+L_80000CA2: add al, 0x0
+L_80000CA4: add [r8], al
+L_80000CA7: add [rdi], al
+L_80000CA9: add [rax], al
+L_80000CAB: adc [rax], eax
+L_80000CAD: add ecx, [rbp+0x100007a]
+L_80000CB3: or al, [rbx]
+L_80000CB5: db 0x17
+L_80000CB6: pop rcx
+L_80000CB7: or al, 0x2b
+L_80000CB9: and [rax], al
+L_80000CBB: add bl, [rdi]
+L_80000CBD: adc [rdx], dl
+L_80000CBF: add [rax], ebp
+L_80000CC1: add [eax], al
+L_80000CC4: or dl, [rax]
+L_80000CC6: add [rsi], al
+L_80000CC8: or [rdx-0x14], dh
+L_80000CCB: add [rax], eax
+L_80000CCD: jo 0x80000cd6
+L_80000CCF: sub [rax+0x0], ch
+L_80000CD2: add [rdx], cl
+L_80000CD4: popf
+L_80000CD5: add [rax], cl
+L_80000CD7: db 0x17
+L_80000CD8: pop rcx
+L_80000CD9: or al, 0x8
+L_80000CDB: adc eax, 0x90d02fe
+L_80000CE0: sub eax, 0x667306d8
+L_80000CE5: add [rax], al
+L_80000CE7: or dl, [rbx]
+L_80000CE9: add al, 0x2b
+L_80000CEB: add [rcx], dl
+L_80000CED: add al, 0x2a
+L_80000CEF: add [rbx], dl
+L_80000CF1: xor [rax+rax], al
+L_80000CF4: push rsp
+L_80000CF5: add al, [rax]
+L_80000CF7: add [rax], cl
+L_80000CF9: add [rax], al
+L_80000CFB: adc [rax], eax
+L_80000CFD: add bh, [rbx+0x23]
+L_80000D00: add [rax], al
+L_80000D02: add al, 0xb
+L_80000D04: db 0x7
+L_80000D05: or al, [rsi]
+L_80000D07: db 0x1f
+L_80000D08: adc [rip+0x5917062d], dh
+L_80000D0E: db 0x45
+L_80000D0F: add al, 0x0
+L_80000D11: add [rax], al
+L_80000D13: mov al, [0x2000000179000001]
+L_80000D1C: add al, [rax]
+L_80000D1E: add bl, ch
+L_80000D20: add [rax], al
+L_80000D22: add [rbx], ch
+L_80000D24: add [rsi], al
+L_80000D26: db 0x1e
+L_80000D27: db 0x2e
+L_80000D28: cmp [rbx], ebp
+L_80000D2A: add [rsi], al
+L_80000D2C: db 0x1f
+L_80000D2D: adc [rbx], bh
+L_80000D2F: test [rcx], al
+L_80000D31: add [rax], al
+L_80000D33: cmp [rbx], cl
+L_80000D35: add al, [rax]
+L_80000D37: add [rsi], al
+L_80000D39: and [rax], al
+L_80000D3B: add [rax], eax
+L_80000D3D: add [rbx], bh
+L_80000D3F: xchg esp, eax
+L_80000D40: add [rax], eax
+L_80000D42: add [rbx], ch
+L_80000D44: add [rsi], al
+L_80000D46: and [rax], al
+L_80000D48: add al, 0x0
+L_80000D4A: add [rbx], bh
+L_80000D4C: mov dword [rcx], 0x2b0000
+L_80000D52: db 0x6
+L_80000D53: and [rax], al
+L_80000D55: or [rax], al
+L_80000D57: add [rbx], bh
+L_80000D59: mov es, [rcx]
+L_80000D5B: add [rax], al
+L_80000D5D: cmp cl, ah
+L_80000D5F: add [rax], eax
+L_80000D61: add [rax], al
+L_80000D63: jae 0x80000dce
+L_80000D65: add [rax], al
+L_80000D67: or cl, [rax+rcx]
+L_80000D6A: db 0x1f
+L_80000D6B: and ch, [rdi+0x6a]
+L_80000D6E: add [rax], al
+L_80000D70: or ah, [rsi]
+L_80000D72: add [rdx], al
+L_80000D74: jnp 0x80000d98
+L_80000D76: add [rax], al
+L_80000D78: add al, 0x74
+L_80000D7A: jz 0x80000d7c
+L_80000D7C: add [rcx], al
+L_80000D7E: or eax, 0x2b041316
+L_80000D83: push 0x9
+L_80000D85: adc [rdi+rbp*2], eax
+L_80000D88: push 0x130a0000
+L_80000D8D: add eax, 0x20051100
+L_80000D92: inc dword [rax]
+L_80000D94: add [rax], al
+L_80000D96: inc byte [rdx]
+L_80000D98: adc eax, [rsi]
+L_80000D9A: adc [rsi], eax
+L_80000D9C: sub al, 0x1d
+L_80000D9E: add [rax], cl
+L_80000DA0: jb 0x80000db0
+L_80000DA2: add al, [rax]
+L_80000DA4: jo 0x80000db7
+L_80000DA6: add eax, 0x1c281a
+L_80000DAB: add [rsi], al
+L_80000DAD: sub [rbx+0x0], ch
+L_80000DB0: add [rdx], cl
+L_80000DB2: outsd
+L_80000DB3: insb
+L_80000DB4: add [rax], al
+L_80000DB6: or ah, [rsi]
+L_80000DB8: add [rbx], ch
+L_80000DBA: sub al, 0x7e
+L_80000DBC: and eax, 0x11040000
+L_80000DC1: add eax, 0x6d6f0712
+L_80000DC6: add [rax], al
+L_80000DC8: or dl, [rbx]
+L_80000DCA: or [rcx], dl
+L_80000DCC: or [rcx+0x7110800], ch
+L_80000DD3: outsd
+L_80000DD4: insb
+L_80000DD5: add [rax], al
+L_80000DD7: or ah, [rsi]
+L_80000DD9: add [rbx], ch
+L_80000DDB: or eax, [rax]
+L_80000DDD: or [rcx], dl
+L_80000DDF: add eax, 0x6a6f
+L_80000DE4: or ah, [rsi]
+L_80000DE6: add [rax], al
+L_80000DE8: adc [rdi+rdx], eax
+L_80000DEB: pop rax
+L_80000DEC: adc eax, [rcx+rdx]
+L_80000DEF: add al, 0x9
+L_80000DF1: outsd
+L_80000DF2: outsb
+L_80000DF3: add [rax], al
+L_80000DF5: or dh, [rdx]
+L_80000DF7: mov word [rax], cs
+L_80000DF9: db 0x1f
+L_80000DFA: and ch, [rdi+0x6a]
+L_80000DFD: add [rax], al
+L_80000DFF: or ah, [rsi]
+L_80000E01: or [rdi+0x6f], ch
+L_80000E04: add [rax], al
+L_80000E06: or dl, [rbx]
+L_80000E08: or [rax], edi
+L_80000E0A: db 0x3f
+L_80000E0B: add [rax], eax
+L_80000E0D: add [rax], al
+L_80000E0F: add bh, [rbx+0x22]
+L_80000E12: add [rax], al
+L_80000E14: add al, 0xa5
+L_80000E16: jl 0x80000e18
+L_80000E18: add [rcx], al
+L_80000E1A: adc ecx, [rdx]
+L_80000E1C: adc [rdx], ecx
+L_80000E1E: sub [rax+0x0], dh
+L_80000E21: add [rdx], cl
+L_80000E23: adc ecx, [rbx]
+L_80000E25: adc [rbx], ecx
+L_80000E27: sub al, 0x21
+L_80000E29: add [rcx], dl
+L_80000E2B: or ah, [rbx]
+L_80000E2D: add [rax], al
+L_80000E2F: add [rax], al
+L_80000E31: add [rax], al
+L_80000E33: add [rax], al
+L_80000E35: xor al, [rdi]
+L_80000E37: jb 0x80000e4d
+L_80000E39: add al, [rax]
+L_80000E3B: jo 0x80000e68
+L_80000E3D: add eax, 0x22672
+L_80000E42: jo 0x80000e57
+L_80000E44: or [rax], edi
+L_80000E46: add eax, [rcx]
+L_80000E48: add [rax], al
+L_80000E4A: adc [rdx], ecx
+L_80000E4C: sub [rcx+0x0], dh
+L_80000E4F: add [rdx], cl
+L_80000E51: adc ecx, [rcx+rdx]
+L_80000E54: or al, 0x2c
+L_80000E56: or eax, 0x23a7200
+L_80000E5B: add [rax+0x13], dh
+L_80000E5E: or [rax], edi
+L_80000E60: jmp 0x91000e65
+L_80000E65: or dl, [rcx]
+L_80000E67: or ch, [rax]
+L_80000E69: jb 0x80000e6b
+L_80000E6B: add [rdx], cl
+L_80000E6D: inc byte [rcx]
+L_80000E6F: adc ecx, [rip+0x192c0d11]
+L_80000E75: add [rdx], dl
+L_80000E77: or ch, [rax]
+L_80000E79: jae 0x80000e7b
+L_80000E7B: add [rdx], cl
+L_80000E7D: jb 0x80000ec1
+L_80000E7F: add al, [rax]
+L_80000E81: jo 0x80000eab
+L_80000E83: imul eax, [rax], 0x0
+L_80000E86: or dl, [rbx]
+L_80000E88: or [rax], edi
+L_80000E8A: mov edi, 0x12000000
+L_80000E8F: or ch, [rax]
+L_80000E91: jae 0x80000e93
+L_80000E93: add [rdx], cl
+L_80000E95: adc ecx, [rcx]
+L_80000E97: cmp [rcx+0x2000000], dh
+L_80000E9D: jnp 0x80000ec1
+L_80000E9F: add [rax], al
+L_80000EA1: add al, 0x28
+L_80000EA3: jz 0x80000ea5
+L_80000EA5: add [rdx], cl
+L_80000EA7: adc ecx, [rsi]
+L_80000EA9: adc cl, [rsi]
+L_80000EAB: sub [rbp+0x0], dh
+L_80000EAE: add [rdx], cl
+L_80000EB0: adc ecx, [rcx]
+L_80000EB2: cmp [rsi+0x72000000], dl
+L_80000EB8: add al, [rax]
+L_80000EBB: jo 0x80000ed0
+L_80000EBD: or [rax], edi
+L_80000EBF: mov al, [rax]
+L_80000EC1: add [rax], al
+L_80000EC3: add bh, [rbx+0x22]
+L_80000EC6: add [rax], al
+L_80000EC8: add al, 0x6f
+L_80000ECA: outsd
+L_80000ECB: add [rax], al
+L_80000ECD: or ch, [rdi+0x76]
+L_80000ED0: add [rax], al
+L_80000ED2: or dl, [rbx]
+L_80000ED4: or [rbx], ebp
+L_80000ED6: jbe 0x80000eda
+L_80000ED8: jz 0x80000ee4
+L_80000EDA: add [rax], al
+L_80000EDC: add bh, [rbx+0x29]
+L_80000EDF: add [rax], al
+L_80000EE1: add al, 0x6f
+L_80000EE3: outsd
+L_80000EE4: add [rax], al
+L_80000EE6: or dl, [rbx]
+L_80000EE8: or [rbx], ebp
+L_80000EEA: db 0x62
+L_80000EEB: jb 0x80000f3f
+L_80000EED: add al, [rax]
+L_80000EEF: jo 0x80000ef3
+L_80000EF1: jnp 0x80000f15
+L_80000EF3: add [rax], al
+L_80000EF5: add al, 0xa5
+L_80000EF7: sub al, 0x0
+L_80000EF9: add [rcx], al
+L_80000EFB: adc ecx, [rdi]
+L_80000EFD: adc cl, [rdi]
+L_80000EFF: jb 0x80000f57
+L_80000F01: add al, [rax]
+L_80000F03: jo 0x80000f2d
+L_80000F05: ja 0x80000f07
+L_80000F07: add [rdx], cl
+L_80000F09: jb 0x80000f5d
+L_80000F0B: add al, [rax]
+L_80000F0D: jo 0x80000f37
+L_80000F0F: add [r8], al
+L_80000F12: or dl, [rbx]
+L_80000F14: or [rbx], ebp
+L_80000F16: db 0x36
+L_80000F17: jb 0x80000f6b
+L_80000F19: add al, [rax]
+L_80000F1B: jo 0x80000f1f
+L_80000F1D: jnp 0x80000f41
+L_80000F1F: add [rax], al
+L_80000F21: add al, 0xa5
+L_80000F23: sub al, 0x0
+L_80000F25: add [rcx], al
+L_80000F27: adc ecx, [rdi]
+L_80000F29: adc cl, [rdi]
+L_80000F2B: jb 0x80000fab
+L_80000F2D: add al, [rax]
+L_80000F2F: jo 0x80000f59
+L_80000F31: ja 0x80000f33
+L_80000F33: add [rdx], cl
+L_80000F35: jb 0x80000f89
+L_80000F37: add al, [rax]
+L_80000F39: jo 0x80000f63
+L_80000F3B: add [r8], al
+L_80000F3E: or dl, [rbx]
+L_80000F40: or [rbx], ebp
+L_80000F42: or al, [rdx]
+L_80000F44: outsd
+L_80000F45: outsd
+L_80000F46: add [rax], al
+L_80000F48: or dl, [rbx]
+L_80000F4A: or [rbx], ebp
+L_80000F4C: add [rcx], dl
+L_80000F4E: or [rdx], ebp
+L_80000F50: adc esi, [rax]
+L_80000F52: add al, [rax]
+L_80000F54: or eax, 0x6000000
+L_80000F59: add [rax], al
+L_80000F5B: adc [rax], eax
+L_80000F5D: add dl, [rdi]
+L_80000F5F: outsd
+L_80000F60: sbb eax, 0xa060000
+L_80000F65: sub eax, [rax]
+L_80000F67: db 0x6
+L_80000F68: sub al, [rax]
+L_80000F6A: add [rax], al
+L_80000F6C: adc esi, [rax]
+L_80000F6E: add al, [rax]
+L_80000F70: db 0x27
+L_80000F71: add [rax], al
+L_80000F73: add [rcx], cl
+L_80000F75: add [rax], al
+L_80000F77: adc [rax], eax
+L_80000F79: lar ebp, [rax]
+L_80000F7C: pop rcx
+L_80000F7D: add [rax], al
+L_80000F7F: or cl, [rdx]
+L_80000F81: db 0x6
+L_80000F82: sub al, 0xf
+L_80000F84: add [rdx], al
+L_80000F86: lar ebp, [rax]
+L_80000F89: pop rdx
+L_80000F8A: add [rax], al
+L_80000F8C: or bh, [rbp+0x24]
+L_80000F8F: add [rax], al
+L_80000F91: add al, 0x0
+L_80000F93: add dl, [rdi]
+L_80000F95: outsd
+L_80000F96: sbb eax, 0xb060000
+L_80000F9B: sub eax, [rax]
+L_80000F9D: db 0x7
+L_80000F9E: sub al, [rax]
+L_80000FA0: adc esi, [rax]
+L_80000FA2: add al, [rax]
+L_80000FA4: db 0x27
+L_80000FA5: add [rax], al
+L_80000FA7: add [rcx], cl
+L_80000FA9: add [rax], al
+L_80000FAB: adc [rax], eax
+L_80000FAD: db 0xf
+L_80000FAE: add al, 0x28
+L_80000FB0: pop rcx
+L_80000FB1: add [rax], al
+L_80000FB3: or cl, [rdx]
+L_80000FB5: db 0x6
+L_80000FB6: sub al, 0xf
+L_80000FB8: add [rdx], al
+L_80000FBA: db 0xf
+L_80000FBB: add al, 0x28
+L_80000FBD: pop rdx
+L_80000FBE: add [rax], al
+L_80000FC0: or bh, [rbp+0x24]
+L_80000FC3: add [rax], al
+L_80000FC5: add al, 0x0
+L_80000FC7: add dl, [rdi]
+L_80000FC9: outsd
+L_80000FCA: sbb eax, 0xb060000
+L_80000FCF: sub eax, [rax]
+L_80000FD1: db 0x7
+L_80000FD2: sub al, [rax]
+L_80000FD4: adc esi, [rax]
+L_80000FD6: add al, [rax]
+L_80000FD8: sbb al, 0x0
+L_80000FDA: add [rax], al
+L_80000FDC: or al, [rax]
+L_80000FDE: add [rcx], dl
+L_80000FE0: add [rdx], al
+L_80000FE2: db 0xe
+L_80000FE3: add al, 0x7d
+L_80000FE5: and al, 0x0
+L_80000FE7: add [rdx+rax], al
+L_80000FEA: db 0x17
+L_80000FEB: outsd
+L_80000FEC: sbb eax, 0xe060000
+L_80000FF1: add al, 0x73
+L_80000FF3: sbb al, [rax]
+L_80000FF5: add [rsi], al
+L_80000FF7: or ch, [rbx]
+L_80000FF9: add [rsi], al
+L_80000FFB: sub dl, [rbx]
+L_80000FFD: xor [rdx], al
+L_80000FFF: db 0x0
\ No newline at end of file
diff --git a/error with nan and inf description.txt b/error with nan and inf description.txt
new file mode 100644
index 0000000..53fce1f
--- /dev/null
+++ b/error with nan and inf description.txt
@@ -0,0 +1,34 @@
+I'm working on a project in C# on .NET Framework 4.0, and the mere presence of NaN, -inf, and +inf in my code (see the sample below) causes a compiler error.
+
+This project is based on the [NotepadPlusPlusPluginPack.NET](https://github.com/kbilsted/NotepadPlusPlusPluginPack.Net), and indeed the same error appears when I introduce those constants in the demo plugin that comes with that repo.
+
+EDIT: It's definitely somehow linked to the plugin pack mentioned above, because I can use those constants in a new .NET 4.0 project with no problems.
+
+For example, any of the first three lines in the block below will raise an error.
+```cs
+double nan = double.NaN;
+double neginf = double.NegativeInfinity;
+double inf = double.PositiveInfinity;
+if (nan == inf || inf == neginf) { }
+```
+
+And no, I don't have to assign a variable to one of those three; just mentioning one of them will throw an error.
+
+My project builds fine otherwise. I can replace every instance of -inf, NaN or +inf with 1d or some other double, and my project will build. And it's not like it builds but it's somehow horribly flawed; every other feature is fine as far as I can tell.
+
+This problem occurs whether I use Visual Studio 2022 or Visual Studio 2019.
+
+Here's a representative line of code and associated error message:
+```cs
+double inf = double.PositiveInfinity;
+```
+
+```
+Severity Code Description Project File Line Suppression State
+Error
+