diff --git a/README.md b/README.md
index 27b8a9a..e2048d5 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,7 @@
# Sublime IDE for Salesforce
-This plugin supports [Sublime Text 3](http://www.sublimetext.com/3) for windows and OSX, not tested for Linux.
+This plugin supports [Sublime Text 3](http://www.sublimetext.com/3) for Windows and OSX, but has not been tested enough on Linux yet.
-All of my motivation on this plugin come from your star, if you think this plugin is helpful in your daily work, please **star** this plugin.
+All of our motivation on this plugin comes from your stars, so if you think this plugin is helpful in your daily work, please **star** this plugin.
# Installation
@@ -14,7 +14,7 @@ Or, You can follow the step-by-step [instruction](https://meighanrockssf.wordpre
# Project Configuration
After you have installed this plugin successfully, you can follow Project Configuration to configure your own project.
-If you don't want to keep your user credential information in the plugin , you just need to do it as below format, plugin will lanuch the browser to start OAuth2 Login process,
+If you don't want to keep your user credential information in the plugin , you just need to do it as below format, plugin will launch the browser to start OAuth2 Login process,
```javascript
"projects": {
"pro-sublime": {
@@ -64,6 +64,12 @@ If you don't want to keep your user credential information in the plugin , you j
> - [Salesforce oAuth2](https://github.com/neworganizing/salesforce-oauth2)
> - [SalesforceXyTools](https://github.com/exiahuang/SalesforceXyTools)
+# Feedback & Contribution
+Feel free to open issues, but please refer to the Raise Issue guide before
+reporting any bug.
+
+We welcome any contribution; please open an issue for discussion before drafting your code.
+
# Q&A
+ ``Refresh Package`` vs ``Update Project``
* ``Refresh Package`` can update the project by the ``package.xml`` in the project folder or ``project/src`` folder
@@ -78,7 +84,7 @@ If you don't want to keep your user credential information in the plugin , you j
* ``Deploy to Server`` is achieved by ``Metadata API`` tech, which is usually used to develop none-apex in sandbox, deploy any components into different org or production
+ What's the usage of ``Update Project Pattern`` command?
- * Everytime when you udpate the ``file_exclude_patterns`` or ``folder_exclude_patterns``, you must execute ``Update Project Pattern`` command to ensure it is working.
+ * Everytime when you update the ``file_exclude_patterns`` or ``folder_exclude_patterns``, you must execute ``Update Project Pattern`` command to ensure it is working.
* Everytime when the default project doesn't appeared in the sidebar panel, you an use this command to show the default project.
+ If you failed to deploy package after release 3.3.7
diff --git a/aura.py b/aura.py
index f2e8a65..13aed3e 100644
--- a/aura.py
+++ b/aura.py
@@ -196,12 +196,13 @@ def __init__(self, *args, **kwargs):
super(DestructLightningFromServer, self).__init__(*args, **kwargs)
def run(self, dirs):
- if sublime.ok_cancel_dialog("This will Delete the whole folder both from the server and local!" +
- " Confirm to continue?"):
+ _, bundle_name = os.path.split(dirs[0])
+ if sublime.ok_cancel_dialog("This will Delete %s !" % bundle_name + " Confirm to continue?"):
processor.handle_destructive_files(dirs, ignore_folder=False)
def is_visible(self, dirs):
- if len(dirs) == 0: return False
+ if len(dirs) == 0:
+ return False
self.settings = context.get_settings()
for _dir in dirs:
attributes = util.get_file_attributes(_dir)
@@ -226,7 +227,7 @@ def run(self, dirs, element=""):
template = templates.get("AuraElement").get(element)
settings = context.get_settings()
templates_path = os.path.join(settings["workspace"],
- ".templates", template["directory"])
+ ".templates", template["directory"])
with open(templates_path) as fp:
body = fp.read()
@@ -297,7 +298,7 @@ def __init__(self, *args, **kwargs):
def run(self, _type=""):
self._type = _type
self.window.show_input_panel("Please Input %s Name: " % _type,
- "", self.on_input, None, None)
+ "", self.on_input, None, None)
def on_input(self, lightning_name):
# Create component to local according to user input
@@ -305,7 +306,7 @@ def on_input(self, lightning_name):
message = 'Invalid format, do you want to try again?'
if not sublime.ok_cancel_dialog(message): return
self.window.show_input_panel("Please Input %s Name: " % self._type,
- "", self.on_input, None, None)
+ "", self.on_input, None, None)
return
# Get settings
@@ -326,10 +327,10 @@ def on_input(self, lightning_name):
message = "%s is already exist, do you want to try again?" % lightning_name
if not sublime.ok_cancel_dialog(message, "Try Again?"): return
self.window.show_input_panel("Please Input Lightning Name: ",
- "", self.on_input, None, None)
+ "", self.on_input, None, None)
return
- lightning_file = os.path.join(component_dir, lightning_name+template["extension"])
+ lightning_file = os.path.join(component_dir, lightning_name + template["extension"])
# Create Aura lightning file
with open(lightning_file, "w") as fp:
diff --git a/completions.py b/completions.py
index d16bed9..b75dddc 100644
--- a/completions.py
+++ b/completions.py
@@ -31,13 +31,17 @@ def load_sobject_cache(reload_cache=False, username=None):
class PackageCompletions(sublime_plugin.EventListener):
+ """
+ Completions for Package.xml
+ """
def on_query_completions(self, view, prefix, locations):
if not view.match_selector(locations[0], "text.xml"):
return []
# Check whether current file is package file
pattern = "[\\s.*<>\\-\\w/\\%1-9]+"
- if not view.find_all(pattern): return
+ if not view.find_all(pattern):
+ return
location = locations[0]
pt = locations[0] - len(prefix) - 1
diff --git a/docs/issue.md b/docs/issue.md
new file mode 100644
index 0000000..c2c35bc
--- /dev/null
+++ b/docs/issue.md
@@ -0,0 +1,30 @@
+# Raise Issue
+
+Create an [Issue][1] for the following situations:
+- Find a bug
+- Have a good idea about any enhancement/new feature
+
+### Report a Bug ###
+
+For better investigation, you should include following message:
+
+1. Title: a concise summary about the bug
+2. Description about the bug
+ - Details with text description, provide screenshot if necessary.
+ - Steps to reproduce the bug.
+ - Any workaround you have tried.
+3. Context information
+ - Operating System and version. e.g *Windows 10, version 1903*
+ - Sublime Text 3 version. e.g *Version 3.2.2 Build 3211*
+ - Your HaoIDE version. e.g *Build 3.6.0*
+ - Sublime Text 3 console log. You can press `Ctrl+~` to open the console panel.
+ - Your user settings if necessary.
+ - Any other necessary information.
+
+### Propose an Enhancement or a New Feature
+
+Just open an issue to discuss it!
+
+If you want to contribute, please also open an issue for discussion before drafting the code!
+
+[1]: https://github.com/xjsender/haoide/issues
\ No newline at end of file
diff --git a/lwc.py b/lwc.py
index 0d8e0a9..fce5131 100644
--- a/lwc.py
+++ b/lwc.py
@@ -158,7 +158,8 @@ def create_resource(self, js_file_name=None):
".templates", template["directory"])
extension = template["extension"]
- element_name = (self.lwc_name if js_file_name is None else self.lwc_name + js_file_name) + extension
+ element_name = self.lwc_name if js_file_name is None else js_file_name
+ element_name += extension
# Combine lwc element component name
element_file = os.path.join(self._dir, element_name)
diff --git a/main.py b/main.py
index 800a6fc..af14e64 100644
--- a/main.py
+++ b/main.py
@@ -18,7 +18,6 @@
from . import context
from . import util
-
from .salesforce.lib import xmlformatter
from .salesforce.lib.jsontoapex import JSONConverter
from .salesforce.lib.panel import Printer
@@ -66,8 +65,8 @@ def is_enabled(self):
self.choose_all = False
if not self.selection:
self.choose_all = True
- self.selection = self.view.substr(sublime.Region(0,
- self.view.size()))
+ self.selection = self.view.substr(sublime.Region(0,
+ self.view.size()))
return True
@@ -90,7 +89,7 @@ def run(self, edit):
lables_metadata = formatter.format_string(lables_metadata)
except ValueError as ve:
return Printer.get('error').write(str(ve))
-
+
view = sublime.active_window().new_file()
view.set_syntax_file("Packages/XML/XML.tmLanguage")
view.run_command("new_view", {
@@ -114,7 +113,7 @@ def run(self, edit):
except ValueError as ve:
raise ve
return Printer.get('error').write(str(ve))
-
+
view = sublime.active_window().new_file()
view.set_syntax_file("Packages/XML/XML.tmLanguage")
view.run_command("new_view", {
@@ -126,11 +125,11 @@ def run(self, edit):
class JsonFormat(BaseSelection, sublime_plugin.TextCommand):
def run(self, edit):
try:
- formatted_json = json.dumps(json.loads(self.selection),
- ensure_ascii=False, indent=4)
+ formatted_json = json.dumps(json.loads(self.selection),
+ ensure_ascii=False, indent=4)
except ValueError as ve:
return Printer.get('error').write(str(ve))
-
+
if not self.choose_all:
view = sublime.active_window().new_file()
view.run_command("new_view", {
@@ -177,9 +176,9 @@ def run(self, edit):
except ValueError as ve:
return Printer.get('error').write(str(ve))
- sublime.active_window().show_input_panel("Input Class Name: ",
- "JSON2Apex", self.on_input_name, None, None)
-
+ sublime.active_window().show_input_panel("Input Class Name: ",
+ "JSON2Apex", self.on_input_name, None, None)
+
def on_input_name(self, name):
if not name: name = "JSON2Apex"
@@ -278,18 +277,18 @@ class DiffWithServer(sublime_plugin.TextCommand):
def run(self, edit, switch=True, source_org=None):
if not source_org:
source_org = self.settings["default_project_name"]
-
+
if switch:
return self.view.window().run_command("switch_project", {
"callback_options": {
- "callback_command": "diff_with_server",
+ "callback_command": "diff_with_server",
"args": {
"switch": False,
"source_org": source_org
}
}
})
-
+
file_name = self.view.file_name()
attr = util.get_component_attribute(file_name, False, reload_cache=True)[0]
@@ -302,7 +301,7 @@ def run(self, edit, switch=True, source_org=None):
def is_enabled(self):
self.file_name = self.view.file_name()
- if not self.file_name:
+ if not self.file_name:
return False
self.settings = context.get_settings()
@@ -394,8 +393,8 @@ def run(self, callback_options={}):
# Add subscribed ones and unsubscribed ones to list
self.items.extend(sorted(subscribed_items))
self.items.extend(sorted(unsubscripted_items))
- self.window.show_quick_panel(self.items, self.on_done,
- sublime.MONOSPACE_FONT)
+ self.window.show_quick_panel(self.items, self.on_done,
+ sublime.MONOSPACE_FONT)
def on_done(self, index):
if index == -1:
@@ -437,7 +436,7 @@ def on_done(self, index):
s.set("projects", projects)
sublime.save_settings(context.TOOLING_API_SETTINGS)
- sublime.set_timeout(lambda:sublime.active_window().run_command("toggle_metadata_objects", {
+ sublime.set_timeout(lambda: sublime.active_window().run_command("toggle_metadata_objects", {
"callback_options": self.callback_options
}), 10)
@@ -472,7 +471,7 @@ def run(self):
if not sublime.ok_cancel_dialog(message, "Confirm Clear?"): return
settings = context.get_settings()
- session_path = settings["workspace"]+"/.config/session.json"
+ session_path = settings["workspace"] + "/.config/session.json"
try:
os.remove(session_path)
sublime.status_message("Session cache is cleared")
@@ -486,7 +485,7 @@ def __init__(self, *args, **kwargs):
def run(self, cache_name):
self.cache_name = cache_name
- self.cache_settings = self.cache_name+".sublime-settings"
+ self.cache_settings = self.cache_name + ".sublime-settings"
self.caches = util.get_sobject_caches(self.cache_settings)
if not self.caches:
Printer.get('error').write("No cache already")
@@ -500,7 +499,7 @@ def on_done(self, index):
if not sublime.ok_cancel_dialog(message, "Confirm Clear"): return
util.clear_cache(self.caches[index][1], self.cache_settings)
- sublime.set_timeout(lambda:sublime.active_window().run_command("clear_cache", {
+ sublime.set_timeout(lambda: sublime.active_window().run_command("clear_cache", {
"cache_name": self.cache_name
}), 10)
@@ -510,8 +509,8 @@ def __init__(self, *args, **kwargs):
super(Convert15Id218Id, self).__init__(*args, **kwargs)
def run(self):
- self.window.show_input_panel("Input 15 Id: ",
- "", self.on_input, None, None)
+ self.window.show_input_panel("Input 15 Id: ",
+ "", self.on_input, None, None)
def on_input(self, input):
c18Id = util.convert_15_to_18(input)
@@ -523,8 +522,8 @@ def __init__(self, *args, **kwargs):
super(DecodeUrl, self).__init__(*args, **kwargs)
def run(self):
- self.window.show_input_panel("Input your URL to be decoded: ",
- "", self.on_input, None, None)
+ self.window.show_input_panel("Input your URL to be decoded: ",
+ "", self.on_input, None, None)
def on_input(self, input):
decodedUrl = urllib.request.unquote(input)
@@ -536,8 +535,8 @@ def __init__(self, *args, **kwargs):
super(EncodeUrl, self).__init__(*args, **kwargs)
def run(self):
- self.window.show_input_panel("Input your URL to be encoded: ",
- "", self.on_input, None, None)
+ self.window.show_input_panel("Input your URL to be encoded: ",
+ "", self.on_input, None, None)
def on_input(self, input):
encodedUrl = urllib.request.quote(input)
@@ -559,32 +558,33 @@ def on_done(self, index):
self.filters = ["all", "updateable", "createable", "custom"]
self.display_filters = [a.capitalize() for a in self.filters]
- sublime.set_timeout(lambda:self.window.show_quick_panel(self.display_filters, self.on_choose_action), 10)
+ sublime.set_timeout(lambda: self.window.show_quick_panel(self.display_filters, self.on_choose_action), 10)
def on_choose_action(self, index):
if index == -1: return
processor.handle_generate_sobject_soql(self.sobject, self.filters[index])
+
class ExportQueryToCsv(sublime_plugin.WindowCommand):
def __init__(self, *args, **kwargs):
super(ExportQueryToCsv, self).__init__(*args, **kwargs)
def run(self, tooling=False):
self.tooling = tooling
- sublime.active_window().show_input_panel('Input Your %s SOQL:' %
- ('Tooling' if tooling else ''), "", self.on_input_soql, None, None)
+ sublime.active_window().show_input_panel('Input Your %s SOQL:' %
+ ('Tooling' if tooling else ''), "", self.on_input_soql, None, None)
def on_input_soql(self, soql):
self.soql = soql.strip()
# Check whether the soql is valid and not parent-to-child query
- match = re.match("[\\n\\s]*SELECT\\s+[*\\w\\n,.:_\\s()]+?\\s+FROM\\s+[1-9_a-zA-Z]+",
- self.soql, re.IGNORECASE)
+ match = re.match("[\\n\\s]*SELECT\\s+[*\\w\\n,.:_\\s()]+?\\s+FROM\\s+[1-9_a-zA-Z]+",
+ self.soql, re.IGNORECASE)
if not match:
Printer.get("error").write("Your input SOQL is not valid")
if sublime.ok_cancel_dialog("Want to try again?"):
- self.window.show_input_panel('Input Your SOQL:',
- "", self.on_input_soql, None, None)
+ self.window.show_input_panel('Input Your SOQL:',
+ "", self.on_input_soql, None, None)
return
# This feature does not support parent to child query
@@ -592,16 +592,16 @@ def on_input_soql(self, soql):
if len(matchs) > 1:
Printer.get("error").write("This feature does not support parent-to-child query")
if sublime.ok_cancel_dialog("Want to try again?"):
- self.window.show_input_panel('Input Your SOQL:',
- "", self.on_input_soql, None, None)
+ self.window.show_input_panel('Input Your SOQL:',
+ "", self.on_input_soql, None, None)
return
# Parse the sObject Name for CSV name
matchstr = match.group(0)
- self.sobject = matchstr[matchstr.rfind(" ")+1:]
+ self.sobject = matchstr[matchstr.rfind(" ") + 1:]
- sublime.active_window().show_input_panel('Input CSV Name:',
- self.sobject, self.on_input_name, None, None)
+ sublime.active_window().show_input_panel('Input CSV Name:',
+ self.sobject, self.on_input_name, None, None)
def on_input_name(self, name):
if not name: return
@@ -615,7 +615,7 @@ def __init__(self, *args, **kwargs):
def run(self, vertical=True):
self.vertical = vertical
self.sobject_recordtypes_attr = processor.populate_sobject_recordtypes()
- if not self.sobject_recordtypes_attr: return # Network Issue Cause
+ if not self.sobject_recordtypes_attr: return # Network Issue Cause
self.sobject_recordtypes = sorted(list(self.sobject_recordtypes_attr.keys()))
self.window.show_quick_panel(self.sobject_recordtypes, self.on_choose_recordtype)
@@ -629,8 +629,8 @@ def on_choose_recordtype(self, index):
recordtype_id = self.sobject_recordtypes_attr[sobject_recordtype]
# handle this describe request
- processor.handle_export_data_template_thread(sobject,
- recordtype_name, recordtype_id, self.vertical)
+ processor.handle_export_data_template_thread(sobject,
+ recordtype_name, recordtype_id, self.vertical)
def is_enabled(self):
return util.check_action_enabled()
@@ -639,7 +639,7 @@ def is_enabled(self):
class ExecuteRestTest(sublime_plugin.TextCommand):
def run(self, edit):
self.items = ["Get", "Post", "Put", "Patch", "Delete", "Tooling Query",
- "Query", "Query All", "Search", "Quick Search",
+ "Query", "Query All", "Search", "Quick Search",
"Head", "Retrieve Body"]
self.view.show_popup_menu(self.items, self.on_choose_action),
@@ -657,8 +657,8 @@ def on_input(self, data):
except ValueError as ve:
Printer.get('error').write(str(ve))
if not sublime.ok_cancel_dialog("Do you want to try again?", "Yes?"): return
- self.view.window().show_input_panel("Input JSON Body: ",
- "", self.on_input, None, None)
+ self.view.window().show_input_panel("Input JSON Body: ",
+ "", self.on_input, None, None)
return
processor.handle_execute_rest_test(self.chosen_action, self.sel, data)
@@ -679,13 +679,13 @@ def run(self, edit, is_background=False, allowed_folders=None):
sel_text = self.view.substr(self.view.word(sel.begin()))
settings = context.get_settings()
for ct in settings["subscribed_metadata_objects"]:
- if "suffix" not in settings[ct]:
+ if "suffix" not in settings[ct]:
continue
suffix = settings[ct]["suffix"]
folder = settings[ct]["directoryName"]
target_file = os.path.join(settings["workspace"] + \
- "/src/%s/%s.%s" % (folder, sel_text, suffix)
- )
+ "/src/%s/%s.%s" % (folder, sel_text, suffix)
+ )
if os.path.isfile(target_file):
if allowed_folders:
if folder in allowed_folders:
@@ -702,7 +702,7 @@ class SetCheckPointCommand(sublime_plugin.TextCommand):
def run(self, edit, mark):
sel = [s for s in self.view.sel()]
self.view.add_regions(mark, sel, "invalid", "dot",
- sublime.DRAW_SOLID_UNDERLINE | sublime.DRAW_EMPTY_AS_OVERWRITE)
+ sublime.DRAW_SOLID_UNDERLINE | sublime.DRAW_EMPTY_AS_OVERWRITE)
class RemoveCheckPointCommand(sublime_plugin.TextCommand):
@@ -716,7 +716,7 @@ def run(self, edit):
def is_enabled(self):
# Must Be File
- if not self.view.file_name():
+ if not self.view.file_name():
return False
self.file_name = self.view.file_name()
@@ -726,16 +726,16 @@ def is_enabled(self):
# Must be class or trigger
self.attributes = util.get_file_attributes(self.file_name)
- if not self.attributes["extension"]:
+ if not self.attributes["extension"]:
return False
- if self.attributes["metadata_folder"] not in ["classes", "triggers"]:
+ if self.attributes["metadata_folder"] not in ["classes", "triggers"]:
return False
# Can't be Test Class
with open(self.file_name, encoding="utf-8") as fp:
self.body = fp.read()
- if "@istest" in self.body.lower():
+ if "@istest" in self.body.lower():
return False
return True
@@ -774,6 +774,7 @@ def run(self, edit):
# Move focus to the coverage view
sublime.active_window().focus_view(coverage_view)
+
class NewViewCommand(sublime_plugin.TextCommand):
"""
Create a new view with specified input
@@ -793,6 +794,7 @@ def run(self, edit, point=0, name="", input=""):
view.set_name(name)
view.insert(edit, point, input)
+
class NewDynamicViewCommand(sublime_plugin.TextCommand):
"""
Create a new view with specified input
@@ -812,7 +814,7 @@ def run(self, edit, view_id=None, view_name="", input="", point=0, erase_all=Fal
view = sublime.active_window().active_view()
if view_id and not view.id() == view_id:
for v in sublime.active_window().views():
- if v.id() == view_id:
+ if v.id() == view_id:
view = v
view.set_scratch(True)
@@ -820,6 +822,7 @@ def run(self, edit, view_id=None, view_name="", input="", point=0, erase_all=Fal
if erase_all: view.erase(edit, sublime.Region(0, view.size()))
view.insert(edit, point, input)
+
class RefreshFolder(sublime_plugin.WindowCommand):
def __init__(self, *args, **kwargs):
super(RefreshFolder, self).__init__(*args, **kwargs)
@@ -836,6 +839,7 @@ def is_visible(self, dirs):
return True
+
class RetrieveMetadataCommand(sublime_plugin.WindowCommand):
def __init__(self, *args, **kwargs):
super(RetrieveMetadataCommand, self).__init__(*args, **kwargs)
@@ -856,10 +860,11 @@ def run(self, retrieve_all=True):
processor.handle_refresh_folder(types, not retrieve_all)
+
class RenameMetadata(sublime_plugin.TextCommand):
def run(self, edit):
- self.view.window().show_input_panel("Input New Name",
- self.filename, self.on_input, None, None)
+ self.view.window().show_input_panel("Input New Name",
+ self.filename, self.on_input, None, None)
def on_input(self, new_name):
if not new_name or not re.match("\w+[a-zA-Z0-9]+", new_name):
@@ -874,8 +879,8 @@ def is_enabled(self):
self.file_name = self.view.file_name()
base, filename = os.path.split(self.file_name)
base, folder = os.path.split(base)
- if folder not in self.settings["all_metadata_folders"]:return False
- if not util.check_enabled(self.view.file_name(), check_cache=False):
+ if folder not in self.settings["all_metadata_folders"]: return False
+ if not util.check_enabled(self.view.file_name(), check_cache=False):
return False
self.filename = filename.split(".")[0]
@@ -886,12 +891,13 @@ def is_enabled(self):
class RetrieveFileFromServer(sublime_plugin.TextCommand):
"""
- Retrieve Single File From Salesforce
+ Retrieve Single File From Salesforce via Metadata API
"""
+
def run(self, edit, switch=True):
files = [self.view.file_name()]
sublime.active_window().run_command("retrieve_files_from_server", {
- "files": files,
+ "files": files,
"switch": switch
})
@@ -901,7 +907,7 @@ def is_enabled(self):
attributes = util.get_file_attributes(self.view.file_name())
metadata_folder = attributes["metadata_folder"]
if metadata_folder not in self.settings["all_metadata_folders"]: return False
- if not util.check_enabled(self.view.file_name(), check_cache=False):
+ if not util.check_enabled(self.view.file_name(), check_cache=False):
return False
return True
@@ -912,8 +918,9 @@ def is_visible(self):
class RetrieveFilesFromServer(sublime_plugin.WindowCommand):
"""
- Retrieve List of files from Salesforce
+ Retrieve List of files from Salesforce via Metadata API
"""
+
def __init__(self, *args, **kwargs):
super(RetrieveFilesFromServer, self).__init__(*args, **kwargs)
@@ -933,7 +940,7 @@ def run(self, files, switch=True, source_org=None, confirmed=False, extract_to=N
if switch:
return self.window.run_command("switch_project", {
"callback_options": {
- "callback_command": "retrieve_files_from_server",
+ "callback_command": "retrieve_files_from_server",
"args": {
"files": files,
"switch": False,
@@ -978,7 +985,7 @@ def is_visible(self, files):
continue # Ignore folder
metadata_folder = util.get_metadata_folder(_file)
if metadata_folder not in settings["all_metadata_folders"]: return False
- if not util.check_enabled(_file, check_cache=False):
+ if not util.check_enabled(_file, check_cache=False):
return False
return True
@@ -997,6 +1004,10 @@ def is_enabled(self):
class DestructFileFromServer(sublime_plugin.TextCommand):
+ """
+ Destruct the selected code from Salesforce and delete from local folder
+ """
+
def run(self, edit):
files = [self.view.file_name()]
sublime.active_window().run_command("destruct_files_from_server", {
@@ -1007,8 +1018,8 @@ def is_enabled(self):
if not self.view or not self.view.file_name(): return False
self.settings = context.get_settings()
metadata_folder = util.get_metadata_folder(self.view.file_name())
- if metadata_folder not in self.settings["all_metadata_folders"]:return False
- if not util.check_enabled(self.view.file_name(), check_cache=False):
+ if metadata_folder not in self.settings["all_metadata_folders"]: return False
+ if not util.check_enabled(self.view.file_name(), check_cache=False):
return False
return True
@@ -1016,29 +1027,36 @@ def is_enabled(self):
def is_visible(self):
return self.is_enabled()
+
class DestructFilesFromServer(sublime_plugin.WindowCommand):
+ """
+ Destruct the selected code files from Salesforce and delete from local folder via Metadata API
+ """
+
def __init__(self, *args, **kwargs):
super(DestructFilesFromServer, self).__init__(*args, **kwargs)
def run(self, files):
- message = "Confirm destructing %s from server?" % (
+ _message = "Confirm destructing %s from server?" % (
"these files" if len(files) > 1 else "this file"
)
- if sublime.ok_cancel_dialog(message, "Confirm"):
+ if sublime.ok_cancel_dialog(_message, "Confirm"):
processor.handle_destructive_files(files)
def is_visible(self, files):
if len(files) == 0: return False
self.settings = context.get_settings()
for _file in files:
- if not os.path.isfile(_file): continue # Ignore folder
+ if not os.path.isfile(_file):
+ continue # Ignore folder
_folder = util.get_metadata_folder(_file)
if _folder not in self.settings["all_metadata_folders"]: return False
- if not util.check_enabled(_file, check_cache=False):
+ if not util.check_enabled(_file, check_cache=False):
return False
return True
+
class DeployZip(sublime_plugin.WindowCommand):
def __init__(self, *args, **kwargs):
super(DeployZip, self).__init__(*args, **kwargs)
@@ -1053,8 +1071,8 @@ def run(self, zipfile_path=None, chosen_classes=[]):
path = sublime.get_clipboard()
if not path or not os.path.isfile(path): path = ""
if not path.endswith("zip"): path = ""
- self.window.show_input_panel("Input Zip File Path:",
- path, self.on_input, None, None)
+ self.window.show_input_panel("Input Zip File Path:",
+ path, self.on_input, None, None)
def on_input(self, zipfile_path):
if not zipfile_path.endswith('.zip'):
@@ -1067,11 +1085,11 @@ def on_input(self, zipfile_path):
def execute_deploy(self):
settings = context.get_settings()
deploy_options = settings["deploy_options"]
- testLevel = deploy_options.get("testLevel", "NoTestRun")
+ testLevel = deploy_options.get("testLevel", "NoTestRun")
if testLevel == "RunSpecifiedTests" and not self.chosen_classes:
return self.window.run_command("choose_test_classes", {
"callback_options": {
- "callback_command": "deploy_zip",
+ "callback_command": "deploy_zip",
"args": {
"zipfile_path": self.zipfile_path,
"chosen_classes": self.chosen_classes
@@ -1079,9 +1097,9 @@ def execute_deploy(self):
}
})
+ processor.handle_deploy_thread(util.base64_encode(self.zipfile_path),
+ chosen_classes=self.chosen_classes)
- processor.handle_deploy_thread(util.base64_encode(self.zipfile_path),
- chosen_classes=self.chosen_classes)
class DeployOpenFilesToServer(sublime_plugin.WindowCommand):
def __init__(self, *args, **kwargs):
@@ -1090,9 +1108,9 @@ def __init__(self, *args, **kwargs):
def run(self, select_all=True):
# If deploy all open files
if select_all:
- return sublime.active_window().run_command("deploy_files_to_server",
- {"files": list(self.file_attributes.values())})
-
+ return sublime.active_window().run_command("deploy_files_to_server",
+ {"files": list(self.file_attributes.values())})
+
# If just deploy some files
if not hasattr(self, "chosen_files"):
self.chosen_files = []
@@ -1116,9 +1134,9 @@ def on_choose(self, index):
chosen_files.append(self.file_attributes[item[4:]])
if chosen_files:
- sublime.active_window().run_command("deploy_files_to_server",
- {"files": chosen_files}
- )
+ sublime.active_window().run_command("deploy_files_to_server",
+ {"files": chosen_files}
+ )
return
# Get chosen file name
@@ -1133,8 +1151,8 @@ def on_choose(self, index):
# Start next round
self.populate_items()
- sublime.set_timeout(lambda:self.window.show_quick_panel(self.items,
- self.on_choose, sublime.MONOSPACE_FONT), 10)
+ sublime.set_timeout(lambda: self.window.show_quick_panel(self.items,
+ self.on_choose, sublime.MONOSPACE_FONT), 10)
def is_enabled(self):
"""
@@ -1151,7 +1169,7 @@ def is_enabled(self):
for _view in views:
_file = _view.file_name()
# Ignore folder
- if not _file or not os.path.isfile(_file):
+ if not _file or not os.path.isfile(_file):
continue
attributes = util.get_file_attributes(_file)
# Ignore non-sfdc files
@@ -1161,11 +1179,12 @@ def is_enabled(self):
self.file_attributes[attributes["fullName"]] = _file
# If there is no sfdc code file, just disable this command
- if not self.file_attributes:
+ if not self.file_attributes:
return False
return True
+
class DeployFileToServer(sublime_plugin.TextCommand):
def run(self, edit, switch=True):
files = [self.view.file_name()]
@@ -1185,10 +1204,12 @@ def is_enabled(self):
def is_visible(self):
return self.is_enabled()
+
class DeployFileToThisServer(sublime_plugin.TextCommand):
"""
Deploy a opened file to current active Salesforce org
"""
+
def run(self, edit):
files = [self.view.file_name()]
sublime.active_window().run_command("deploy_files_to_server", {
@@ -1212,11 +1233,11 @@ def run(self, files, switch=True, source_org=None, chosen_classes=[]):
source_org = settings["default_project_name"]
deploy_options = settings["deploy_options"]
- testLevel = deploy_options.get("testLevel", "NoTestRun")
+ testLevel = deploy_options.get("testLevel", "NoTestRun")
if testLevel == "RunSpecifiedTests" and not chosen_classes:
return self.window.run_command("choose_test_classes", {
"callback_options": {
- "callback_command": "deploy_files_to_server",
+ "callback_command": "deploy_files_to_server",
"args": {
"files": files,
"switch": False,
@@ -1228,7 +1249,7 @@ def run(self, files, switch=True, source_org=None, chosen_classes=[]):
if switch:
return self.window.run_command("switch_project", {
"callback_options": {
- "callback_command": "deploy_files_to_server",
+ "callback_command": "deploy_files_to_server",
"args": {
"files": files,
"switch": False,
@@ -1247,7 +1268,7 @@ def run(self, files, switch=True, source_org=None, chosen_classes=[]):
# Keep the files to deploy
base64_encoded_zip = util.build_deploy_package(files)
processor.handle_deploy_thread(
- base64_encoded_zip,
+ base64_encoded_zip,
source_org=source_org,
chosen_classes=chosen_classes
)
@@ -1261,7 +1282,7 @@ def is_visible(self, files):
if not files: return False
self.settings = context.get_settings()
for _file in files:
- if not os.path.isfile(_file): continue # Ignore folder
+ if not os.path.isfile(_file): continue # Ignore folder
attributes = util.get_file_attributes(_file)
if attributes["metadata_folder"] not in self.settings["all_metadata_folders"]:
return False
@@ -1278,6 +1299,7 @@ def run(self, edit, switch=True, source_org=None):
def is_enabled(self):
return self.view.file_name() is not None
+
class CopyFilesToProject(sublime_plugin.WindowCommand):
def __init__(self, *args, **kwargs):
super(CopyFilesToProject, self).__init__(*args, **kwargs)
@@ -1290,7 +1312,7 @@ def run(self, files, switch=True, source_org=None):
if switch:
return self.window.run_command("switch_project", {
"callback_options": {
- "callback_command": "copy_files_to_project",
+ "callback_command": "copy_files_to_project",
"args": {
"files": files,
"switch": False,
@@ -1314,8 +1336,8 @@ def is_enabled(self, files, **kwargs):
self.settings = context.get_settings()
self.attributes = []
for _file in files:
- if not os.path.isfile(_file): continue # Ignore folder
- if _file.endswith("-meta.xml"): continue # Ignore meta file
+ if not os.path.isfile(_file): continue # Ignore folder
+ if _file.endswith("-meta.xml"): continue # Ignore meta file
attribute = util.get_file_attributes(_file)
if attribute["metadata_folder"] not in self.settings["all_metadata_folders"]:
continue
@@ -1331,6 +1353,7 @@ def is_enabled(self, files, **kwargs):
def is_visible(self, files, **kwargs):
return self.is_enabled(files)
+
class ExportProfile(sublime_plugin.WindowCommand):
def __init__(self, *args, **kwargs):
super(ExportProfile, self).__init__(*args, **kwargs)
@@ -1343,6 +1366,7 @@ def run(self):
def is_enabled(self):
return util.check_action_enabled()
+
class ExportValidationRulesCommand(sublime_plugin.WindowCommand):
def __init__(self, *args, **kwargs):
super(ExportValidationRulesCommand, self).__init__(*args, **kwargs)
@@ -1359,6 +1383,7 @@ def run(self):
def is_enabled(self):
return util.check_action_enabled()
+
class ExportCustomLablesCommand(sublime_plugin.WindowCommand):
def __init__(self, *args, **kwargs):
super(ExportCustomLablesCommand, self).__init__(*args, **kwargs)
@@ -1374,11 +1399,12 @@ def run(self):
outputdir = settings["workspace"] + "/.export/labels"
if not os.path.exists(outputdir): os.makedirs(outputdir)
lables = xmltodict.parse(open(lable_path, "rb").read())
- util.list2csv(outputdir+"/Labels.csv", lables["CustomLabels"]["labels"])
+ util.list2csv(outputdir + "/Labels.csv", lables["CustomLabels"]["labels"])
def is_enabled(self):
return util.check_action_enabled()
+
class ExportWorkflowsCommand(sublime_plugin.WindowCommand):
def __init__(self, *args, **kwargs):
super(ExportWorkflowsCommand, self).__init__(*args, **kwargs)
@@ -1396,26 +1422,29 @@ def run(self):
def is_enabled(self):
return util.check_action_enabled()
+
class ExportCustomFieldCommand(sublime_plugin.WindowCommand):
def __init__(self, *args, **kwargs):
super(ExportCustomFieldCommand, self).__init__(*args, **kwargs)
def run(self):
processor.handle_export_customfield()
-
+
def is_enabled(self):
return util.check_action_enabled()
+
class ExportRoleHierarchyCommand(sublime_plugin.WindowCommand):
def __init__(self, *args, **kwargs):
super(ExportRoleHierarchyCommand, self).__init__(*args, **kwargs)
def run(self):
processor.handle_export_role_hierarchy()
-
+
def is_enabled(self):
return util.check_action_enabled()
+
class DescribeSobjectCommand(sublime_plugin.WindowCommand):
def __init__(self, *args, **kwargs):
super(DescribeSobjectCommand, self).__init__(*args, **kwargs)
@@ -1429,13 +1458,14 @@ def on_done(self, index):
if index == -1: return
processor.handle_describe_sobject(self.sobjects[index])
+
class ExportWorkbookCommand(sublime_plugin.WindowCommand):
def __init__(self, *args, **kwargs):
super(ExportWorkbookCommand, self).__init__(*args, **kwargs)
def run(self):
- self.window.show_input_panel("Input Sobjects(* or sobjects separated with semi-colon), Case is Sensitive",
- "*", self.on_input, None, None)
+ self.window.show_input_panel("Input Sobjects(* or sobjects separated with semi-colon), Case is Sensitive",
+ "*", self.on_input, None, None)
def on_input(self, input):
# Display the fields in a new view
@@ -1455,8 +1485,8 @@ def on_input(self, input):
if sobject not in sobjects_describe:
message = '"%s" is not valid sobject, do you want to try again?' % sobject
if not sublime.ok_cancel_dialog(message, "Continue?"): return
- self.window.show_input_panel("Sobjects(* means all, or sobjects seprated with semi-colon)",
- input, self.on_input, None, None)
+ self.window.show_input_panel("Sobjects(* means all, or sobjects seprated with semi-colon)",
+ input, self.on_input, None, None)
return
# After ensured input is valid, just start to generate workbooks
@@ -1465,6 +1495,7 @@ def on_input(self, input):
def is_enabled(self):
return util.check_action_enabled()
+
class ViewComponentInSfdcCommand(sublime_plugin.WindowCommand):
def __init__(self, *args, **kwargs):
super(ViewComponentInSfdcCommand, self).__init__(*args, **kwargs)
@@ -1484,6 +1515,7 @@ def on_done(self, index):
startURL = "/" + class_id
self.window.run_command("login_to_sfdc", {"startURL": startURL})
+
class PreviewPageCommand(sublime_plugin.TextCommand):
def run(self, view):
startURL = "/apex/" + self.attributes["name"]
@@ -1497,6 +1529,7 @@ def is_visible(self):
return util.check_enabled(self.view.file_name())
+
class RunOneTestCommand(sublime_plugin.WindowCommand):
""" List the test classes from local cache, after any one is chosen,
get the attribute of the chosen class and run test,
@@ -1520,6 +1553,7 @@ class RunOneTestCommand(sublime_plugin.WindowCommand):
...
}
"""
+
def __init__(self, *args, **kwargs):
super(RunOneTestCommand, self).__init__(*args, **kwargs)
@@ -1545,6 +1579,7 @@ def on_done(self, index):
class_id = self.classes_attr[key]["id"]
processor.handle_run_test(class_name, class_id)
+
class FetchOrgWideCoverageCommand(sublime_plugin.WindowCommand):
def __init__(self, *args, **kwargs):
super(FetchOrgWideCoverageCommand, self).__init__(*args, **kwargs)
@@ -1552,6 +1587,7 @@ def __init__(self, *args, **kwargs):
def run(self):
pass
+
class ChooseTestClasses(sublime_plugin.WindowCommand):
def __init__(self, *args, **kwargs):
super(ChooseTestClasses, self).__init__(*args, **kwargs)
@@ -1566,7 +1602,8 @@ def run(self, callback_options={}):
self.classes_attr = util.populate_components("ApexClass")
self.classmap = {}
- selected_items = []; unselected_items = []
+ selected_items = [];
+ unselected_items = []
for key, item in self.classes_attr.items():
if not item["is_test"]:
continue
@@ -1578,8 +1615,8 @@ def run(self, callback_options={}):
cname = item["name"]
classItem = "%s[%s] %s" % (
- " " * 4,
- "√" if cname in self.chosen_classes else "x",
+ " " * 4,
+ "√" if cname in self.chosen_classes else "x",
cname
)
if cname in self.chosen_classes:
@@ -1595,7 +1632,7 @@ def run(self, callback_options={}):
org_name=settings["default_project_name"]
)
);
-
+
# Add `All` Item
allItem = "[%s] All" % (
"√" if self.chosen_classes else "x"
@@ -1612,8 +1649,8 @@ def run(self, callback_options={}):
if hasattr(self, "index"):
selected_index = self.index
- self.window.show_quick_panel(self.items, self.on_done,
- sublime.MONOSPACE_FONT, selected_index)
+ self.window.show_quick_panel(self.items, self.on_done,
+ sublime.MONOSPACE_FONT, selected_index)
def on_done(self, index):
if index == -1:
@@ -1653,6 +1690,7 @@ def on_done(self, index):
callback_options=self.callback_options
), 10)
+
class RunSyncTests(sublime_plugin.WindowCommand):
def __init__(self, *args, **kwargs):
super(RunSyncTests, self).__init__(*args, **kwargs)
@@ -1666,6 +1704,7 @@ def run(self, chosen_classes=[]):
})
processor.handle_run_sync_test(chosen_classes)
+
class RunSyncTest(sublime_plugin.TextCommand):
def run(self, edit):
tests = [];
@@ -1679,9 +1718,9 @@ def run(self, edit):
def is_enabled(self):
# Get current file name and Read file content
file_name = self.view.file_name()
- if not file_name or not file_name.endswith(".cls"):
+ if not file_name or not file_name.endswith(".cls"):
return False
- if not util.check_enabled(file_name):
+ if not util.check_enabled(file_name):
return False
# Test class must be class firstly
@@ -1699,12 +1738,12 @@ def is_enabled(self):
component_attribute["namespacePrefix"],
self.cname
)
-
+
for region in self.view.sel():
sel = self.view.substr(self.view.word(region.begin()))
if sel and not sel.isspace() and not re.compile(r'^[a-zA-Z0-9_]*$').match(sel.strip()):
return False
-
+
return True
def is_visible(self):
@@ -1715,6 +1754,7 @@ class RunAsyncTest(sublime_plugin.WindowCommand):
"""
@deprecated
"""
+
def __init__(self, *args, **kwargs):
super(RunAsyncTest, self).__init__(*args, **kwargs)
@@ -1733,7 +1773,7 @@ def is_enabled(self, files):
continue
self.class_ids.append(component_attribute["id"])
-
+
return len(self.class_ids) > 0
def is_visible(self):
@@ -1744,6 +1784,7 @@ class RunTestCommand(sublime_plugin.TextCommand):
"""
Run Async Test
"""
+
def run(self, view):
# Get component_attribute by file_name
attributes = util.get_file_attributes(self.view.file_name())
@@ -1755,9 +1796,9 @@ def run(self, view):
def is_enabled(self):
# Get current file name and Read file content
file_name = self.view.file_name()
- if not file_name or not file_name.endswith(".cls"):
+ if not file_name or not file_name.endswith(".cls"):
return False
- if not util.check_enabled(file_name):
+ if not util.check_enabled(file_name):
return False
# Test class must be class firstly
@@ -1773,6 +1814,7 @@ def is_enabled(self):
def is_visible(self):
return self.is_enabled()
+
class TrackAllDebugLogs(sublime_plugin.WindowCommand):
def __init__(self, *args, **kwargs):
super(TrackAllDebugLogs, self).__init__(*args, **kwargs)
@@ -1783,6 +1825,7 @@ def run(self):
if sublime.ok_cancel_dialog("Confirm to track logs for all users?", "Continue"):
processor.handle_track_all_debug_logs_thread(users)
+
class TrackDebugLog(sublime_plugin.WindowCommand):
def __init__(self, *args, **kwargs):
super(TrackDebugLog, self).__init__(*args, **kwargs)
@@ -1791,7 +1834,7 @@ def run(self, track_self=False):
if track_self:
processor.handle_create_debug_log('Me', None)
return
-
+
self.users = processor.handle_populate_users("track_debug_log")
if not self.users: return
self.users_name = sorted(self.users.keys(), reverse=False)
@@ -1804,6 +1847,7 @@ def on_done(self, index):
user_id = self.users[user_name]
processor.handle_create_debug_log(user_name, user_id)
+
class FetchDebugLogCommand(sublime_plugin.WindowCommand):
def __init__(self, *args, **kwargs):
super(FetchDebugLogCommand, self).__init__(*args, **kwargs)
@@ -1814,7 +1858,7 @@ def run(self, fetch_self=False):
return
self.users = processor.handle_populate_users("fetch_debug_log")
- if not self.users: return # Network Issue Cause
+ if not self.users: return # Network Issue Cause
self.users_name = sorted(self.users.keys(), reverse=False)
self.window.show_quick_panel(self.users_name, self.on_done)
@@ -1858,7 +1902,7 @@ def run(self, edit):
# get file content, may be apex class or trigger
class_path = os.path.join(work_dir, 'src',
- 'classes', self.file_name+'.cls')
+ 'classes', self.file_name + '.cls')
trigger_path = os.path.join(work_dir, 'src',
'triggers', self.file_name + '.trigger')
_path = class_path if os.path.isfile(class_path) else trigger_path
@@ -1888,6 +1932,7 @@ def is_enabled(self):
def is_visible(self):
return self.view.name() == 'Test Result'
+
class ViewDebugOnly(sublime_plugin.TextCommand):
def run(self, view):
whole_region = sublime.Region(0, self.view.size())
@@ -1908,6 +1953,7 @@ def run(self, view):
def is_enabled(self):
return self.view.settings().get("is_debug_log") is True
+
class ExecuteQuery(sublime_plugin.TextCommand):
def run(self, view):
sublime.active_window().run_command("haoku", {
@@ -1923,6 +1969,7 @@ def is_enabled(self):
return True
+
class ExecuteAnonymousCommand(sublime_plugin.TextCommand):
def run(self, view):
processor.handle_execute_anonymous(self.selection)
@@ -1934,6 +1981,7 @@ def is_enabled(self):
return True
+
class ViewIdInSfdcWebCommand(sublime_plugin.TextCommand):
def run(self, view):
startURL = "/" + self.record_id
@@ -1942,7 +1990,7 @@ def run(self, view):
if self.record_id.startswith("07L"):
startURL = "/p/setup/layout/ApexDebugLogDetailEdit/d?apex_log_id=" + self.record_id
-
+
self.view.window().run_command("login_to_sfdc", {"startURL": startURL})
def is_enabled(self):
@@ -1951,7 +1999,7 @@ def is_enabled(self):
self.record_id = self.view.substr(self.view.sel()[0])
else:
self.record_id = self.view.substr(self.view.sel()[0]).encode("utf-8")
-
+
if len(self.record_id) != 15 and len(self.record_id) != 18:
return False
@@ -1960,6 +2008,7 @@ def is_enabled(self):
return True
+
class ShowInSfdcWebCommand(sublime_plugin.TextCommand):
def run(self, view):
# Get file_name and component_attribute
@@ -1972,6 +2021,7 @@ def run(self, view):
def is_enabled(self):
return util.check_enabled(self.view.file_name())
+
class LoginToSfdcCommand(sublime_plugin.WindowCommand):
def __init__(self, *args, **kwargs):
super(LoginToSfdcCommand, self).__init__(*args, **kwargs)
@@ -1982,17 +2032,17 @@ def run(self, startURL="", copy_url=False):
session = util.get_session_info(settings)
# If .config/session.json is not exist, login firstly
- if not session:
- return self.window.run_command('login',
- {
- "callback_options": {
- "callback_command": "login_to_sfdc",
- "args": {
- "startURL": startURL
- }
- }
- }
- )
+ if not session:
+ return self.window.run_command('login',
+ {
+ "callback_options": {
+ "callback_command": "login_to_sfdc",
+ "args": {
+ "startURL": startURL
+ }
+ }
+ }
+ )
# If .config/session.json is exist, use frontdoor method
show_url = "%s/secur/frontdoor.jsp?sid=%s&retURL=%s" % (
@@ -2004,6 +2054,7 @@ def run(self, startURL="", copy_url=False):
sublime.set_clipboard(show_url)
+
class AboutCommand(sublime_plugin.ApplicationCommand):
def run(command):
package_info = sublime.load_settings("package.sublime-settings")
@@ -2016,21 +2067,25 @@ def run(command):
)
sublime.message_dialog(version_info)
+
class ReportIssueCommand(sublime_plugin.ApplicationCommand):
def run(command):
package_info = sublime.load_settings("package.sublime-settings")
util.open_with_browser(package_info.get("issue_url"))
+
class HaoideHelp(sublime_plugin.ApplicationCommand):
def run(command, url=""):
package_info = sublime.load_settings("package.sublime-settings")
util.open_with_browser(package_info.get("homepage") + url)
+
class ReleaseNotesCommand(sublime_plugin.ApplicationCommand):
def run(command):
package_info = sublime.load_settings("package.sublime-settings")
util.open_with_browser(package_info.get("history_url"))
+
class DeleteFilesFromServer(sublime_plugin.WindowCommand):
def __init__(self, *args, **kwargs):
super(DeleteFilesFromServer, self).__init__(*args, **kwargs)
@@ -2058,11 +2113,12 @@ def is_visible(self, files):
return True
+
class DeleteFileFromServer(sublime_plugin.TextCommand):
def run(self, view):
files = [self.view.file_name()]
self.view.window().run_command("delete_files_from_server", {
- "files" : [self.view.file_name()]
+ "files": [self.view.file_name()]
})
def is_enabled(self):
@@ -2073,20 +2129,21 @@ def is_enabled(self):
attr = util.get_component_attribute(self.file_name)[0]
if not attr or "url" not in attr:
return False
-
+
return True
def is_visible(self):
return self.is_enabled()
+
class CreateApexTriggerCommand(sublime_plugin.WindowCommand):
def __init__(self, *args, **kwargs):
super(CreateApexTriggerCommand, self).__init__(*args, **kwargs)
def run(self):
sobjects_describe = util.populate_sobjects_describe()
- self.sobjects = sorted([name for name in sobjects_describe\
- if "triggerable" in sobjects_describe[name] and sobjects_describe[name]["triggerable"]])
+ self.sobjects = sorted([name for name in sobjects_describe \
+ if "triggerable" in sobjects_describe[name] and sobjects_describe[name]["triggerable"]])
self.window.show_quick_panel(self.sobjects, self.on_done)
def on_done(self, index):
@@ -2100,6 +2157,7 @@ def on_done(self, index):
def is_enabled(self):
return util.check_action_enabled()
+
class CreateApexPageCommand(sublime_plugin.WindowCommand):
def __init__(self, *args, **kwargs):
super(CreateApexPageCommand, self).__init__(*args, **kwargs)
@@ -2113,6 +2171,7 @@ def run(self):
def is_enabled(self):
return util.check_action_enabled()
+
class CreateApexComponentCommand(sublime_plugin.WindowCommand):
def __init__(self, *args, **kwargs):
super(CreateApexComponentCommand, self).__init__(*args, **kwargs)
@@ -2126,6 +2185,7 @@ def run(self):
def is_enabled(self):
return util.check_action_enabled()
+
class CreateApexClassCommand(sublime_plugin.WindowCommand):
def __init__(self, *args, **kwargs):
super(CreateApexClassCommand, self).__init__(*args, **kwargs)
@@ -2139,6 +2199,7 @@ def run(self):
def is_enabled(self):
return util.check_action_enabled()
+
class CreateStaticResource(sublime_plugin.WindowCommand):
def __init__(self, *args, **kwargs):
super(CreateStaticResource, self).__init__(*args, **kwargs)
@@ -2152,36 +2213,36 @@ def on_choose(self, index):
if index == -1: return
self.content_type = self.content_types[index]
-
+
self.input_name_message = "Please Input StaticResource Name: "
- self.window.show_input_panel(self.input_name_message,
- "", self.on_input_name, None, None)
+ self.window.show_input_panel(self.input_name_message,
+ "", self.on_input_name, None, None)
def on_input_name(self, input):
# Create component to local according to user input
if not re.match('^[a-zA-Z]+\\w+$', input):
message = 'Invalid name, do you want to try again?'
if not sublime.ok_cancel_dialog(message, "Try Again?"): return
- self.window.show_input_panel(self.input_name_message,
- "", self.on_input_name, None, None)
+ self.window.show_input_panel(self.input_name_message,
+ "", self.on_input_name, None, None)
return
-
+
self.resource_name = input
# Input file location
self.input_location_message = "Please Input File or Path for StaticResource: "
- self.window.show_input_panel(self.input_location_message,
- "", self.on_input_location, None, None)
+ self.window.show_input_panel(self.input_location_message,
+ "", self.on_input_location, None, None)
def on_input_location(self, location):
# Get file or path from user input, allow trying agin
if not os.path.exists(location) and not os.path.isfile(location):
if not sublime.ok_cancel_dialog("Invalid file or path", "Try Again?"):
return
- self.window.show_input_panel(self.input_location_message,
- "", self.on_input_location, None, None)
+ self.window.show_input_panel(self.input_location_message,
+ "", self.on_input_location, None, None)
return
-
+
if os.path.isfile(location):
body = open(location, "r").read()
@@ -2197,15 +2258,20 @@ def on_input_location(self, location):
def is_enabled(self):
return util.check_action_enabled()
+
class CreateComponentCommand(sublime_plugin.WindowCommand):
+ """
+ Create Apex Class/Trigger/Page/Component via Tooling API
+ """
+
def __init__(self, *args, **kwargs):
super(CreateComponentCommand, self).__init__(*args, **kwargs)
def run(self, template_name=None,
- component_name=None,
- component_type=None,
- markup_or_body=None,
- sobject_name=None):
+ component_name=None,
+ component_type=None,
+ markup_or_body=None,
+ sobject_name=None):
self.template_name = template_name
self.component_name = component_name
self.component_type = component_type
@@ -2219,7 +2285,7 @@ def run(self, template_name=None,
# After input # in visualforce page, we can get
# the component name and template name, no need to choose again
- if self.component_name and self.template_name:
+ if self.component_name and self.template_name:
self.template_attr = templates[self.template_name]
self.create_component()
else:
@@ -2251,7 +2317,7 @@ def on_input(self, input):
if not sublime.ok_cancel_dialog(message, "Try Again?"): return
self.window.show_input_panel("Please Input Name: ", "", self.on_input, None, None)
return
-
+
self.component_name = input
self.create_component()
@@ -2276,8 +2342,8 @@ def create_component(self):
file_name = "%s/%s" % (component_outputdir, self.component_name + extension)
if os.path.isfile(file_name):
- message = '"%s" is already exist, do you want to try again?' % self.component_name
- if not sublime.ok_cancel_dialog(message, "Continue?"):
+ _message = '"%s" is already exist, do you want to try again?' % self.component_name
+ if not sublime.ok_cancel_dialog(_message, "Continue?"):
self.window.open_file(file_name)
return
self.window.show_input_panel("Please Input Name: ", "", self.on_input, None, None)
@@ -2292,7 +2358,7 @@ def create_component(self):
# Build Post body
data = {
- "name": self.component_name,
+ "name": self.component_name,
self.markup_or_body: body
}
@@ -2303,22 +2369,23 @@ def create_component(self):
elif self.component_type in ["ApexPage", "ApexComponent"]:
data["MasterLabel"] = self.component_name
- processor.handle_create_component(data, self.component_name,
- self.component_type,
- self.markup_or_body,
- file_name)
+ processor.handle_create_component(data, self.component_name,
+ self.component_type,
+ self.markup_or_body,
+ file_name)
class SaveToServer(sublime_plugin.TextCommand):
"""
- Save Metadata to Server using Tooling API or Metadata API
+ Save Metadata to Server using Tooling API
"""
+
def run(self, edit, is_check_only=False):
# Check whether need confirm
settings = context.get_settings()
if settings["confirm_on_save"]:
message = "Confirm to continue save operation?"
- if not sublime.ok_cancel_dialog(message, "Save to Server?"):
+ if not sublime.ok_cancel_dialog(message, "Save to Server?"):
return
# Automatically save current file if dirty
@@ -2334,12 +2401,13 @@ def is_enabled(self):
attributes = util.get_file_attributes(self.view.file_name())
if attributes["metadata_folder"] not in ["classes", "components", "pages", "triggers", "aura", "lwc"]:
return False
-
+
return util.check_enabled(self.view.file_name())
def is_visible(self):
return self.is_enabled()
+
class ViewFileAttributes(sublime_plugin.TextCommand):
def run(self, edit):
view = sublime.active_window().new_file()
@@ -2355,9 +2423,10 @@ def is_enabled(self):
self.component_attribute, self.cname = util.get_component_attribute(self.file_name)
if not self.component_attribute:
return False
-
+
return True
+
class SwitchProjectCommand(sublime_plugin.WindowCommand):
def __init__(self, *args, **kwargs):
super(SwitchProjectCommand, self).__init__(*args, **kwargs)
@@ -2369,8 +2438,8 @@ def run(self, callback_options={}):
for k, v in settings["projects"].items():
if not v.get("hidden_in_project_list", False):
projects[k] = v
- self.projects = ["(" + ('Active' if projects[p]["default"] else
- 'Inactive') + ") " + p for p in projects]
+ self.projects = ["(" + ('Active' if projects[p]["default"] else
+ 'Inactive') + ") " + p for p in projects]
self.projects = sorted(self.projects, reverse=False)
self.window.show_quick_panel(self.projects, self.on_done)
@@ -2397,6 +2466,7 @@ def on_done(self, index):
args = self.callback_options["args"] if "args" in self.callback_options else {}
self.window.run_command(callback_command, args)
+
class Login(sublime_plugin.WindowCommand):
def __init__(self, *args, **kwargs):
super(Login, self).__init__(*args, **kwargs)
@@ -2404,6 +2474,7 @@ def __init__(self, *args, **kwargs):
def run(self, callback_options={}, force=False):
processor.handle_login_thread(callback_options, force=force)
+
class UpdateUserLanguage(sublime_plugin.WindowCommand):
def __init__(self, *args, **kwargs):
super(UpdateUserLanguage, self).__init__(*args, **kwargs)
@@ -2423,13 +2494,14 @@ def on_choose(self, index):
def is_enabled(self):
return util.check_action_enabled()
+
class EnableDevelopmentMode(sublime_plugin.WindowCommand):
def __init__(self, *args, **kwargs):
super(EnableDevelopmentMode, self).__init__(*args, **kwargs)
def run(self):
self.users = processor.handle_populate_users("enable_development_mode")
- if not self.users: return # Network Issue Cause
+ if not self.users: return # Network Issue Cause
self.users_name = sorted(self.users.keys(), reverse=False)
self.window.show_quick_panel(self.users_name, self.on_done)
@@ -2443,13 +2515,14 @@ def on_done(self, index):
def is_enabled(self):
return util.check_action_enabled()
+
class UpdateUserPassword(sublime_plugin.WindowCommand):
def __init__(self, *args, **kwargs):
super(UpdateUserPassword, self).__init__(*args, **kwargs)
def run(self):
self.users = processor.handle_populate_users("update_user_password")
- if not self.users: return # Network Issue Cause
+ if not self.users: return # Network Issue Cause
self.users_name = sorted(self.users.keys(), reverse=False)
self.window.show_quick_panel(self.users_name, self.on_done)
@@ -2459,15 +2532,15 @@ def on_done(self, index):
user_name = self.users_name[index]
self.user_id = self.users[user_name]
- sublime.active_window().show_input_panel("Input New Password: ",
- "", self.on_input, None, None)
+ sublime.active_window().show_input_panel("Input New Password: ",
+ "", self.on_input, None, None)
def on_input(self, password):
if not re.match('[\s\S]{5,22}', password):
message = 'Invalid password, do you want to try again?'
if not sublime.ok_cancel_dialog(message, "Try Again?"): return
- return sublime.active_window().show_input_panel("Input New Password: ",
- "", self.on_input, None, None)
+ return sublime.active_window().show_input_panel("Input New Password: ",
+ "", self.on_input, None, None)
processor.handle_update_user_password(self.user_id, password)
@@ -2523,14 +2596,15 @@ def run(self):
"callback_command": "create_new_project"
}
})
-
+
dpn = settings["default_project"]["project_name"]
message = "Are you sure you really want to create new project for %s?" % dpn
if not sublime.ok_cancel_dialog(message, "Create New Project?"): return
-
+
util.add_project_to_workspace(settings)
processor.handle_new_project()
+
class DescribeMetadata(sublime_plugin.WindowCommand):
def __init__(self, *args, **kwargs):
super(DescribeMetadata, self).__init__(*args, **kwargs)
@@ -2538,6 +2612,7 @@ def __init__(self, *args, **kwargs):
def run(self, callback_options={}):
processor.handle_describe_metadata(callback_options)
+
class ExtractToHere(sublime_plugin.WindowCommand):
def __init__(self, *args, **kwargs):
super(ExtractToHere, self).__init__(*args, **kwargs)
@@ -2555,12 +2630,13 @@ def run(self, files):
Printer.get("log").write_start().write("Extracted to " + extract_to)
def is_visible(self, files):
- if not files or len(files) > 1:
+ if not files or len(files) > 1:
return False
self._file = files[0]
return zipfile.is_zipfile(self._file)
+
class UpdateStaticResource(sublime_plugin.WindowCommand):
def __init__(self, *args, **kwargs):
super(UpdateStaticResource, self).__init__(*args, **kwargs)
@@ -2579,6 +2655,7 @@ def is_visible(self, dirs):
return True
+
class RefreshFileFromServer(sublime_plugin.TextCommand):
def run(self, view):
self.view.window().run_command("refresh_files_from_server", {
@@ -2589,7 +2666,7 @@ def is_enabled(self):
file_name = self.view.file_name()
if not file_name: return False
attr = util.get_component_attribute(file_name)[0]
- if not attr or "url" not in attr:
+ if not attr or "url" not in attr:
return False
return True
@@ -2597,6 +2674,7 @@ def is_enabled(self):
def is_visible(self):
return self.is_enabled()
+
class RefreshFilesFromServer(sublime_plugin.WindowCommand):
def __init__(self, *args, **kwargs):
super(RefreshFilesFromServer, self).__init__(*args, **kwargs)
@@ -2606,7 +2684,7 @@ def run(self, files):
if not sublime.ok_cancel_dialog(message, "Refresh Files?"): return
for file_name in files:
- if file_name.endswith("-meta.xml"): continue # Ignore -meta.xml file
+ if file_name.endswith("-meta.xml"): continue # Ignore -meta.xml file
attr = util.get_component_attribute(file_name)[0]
# Handle Refresh Current Component
@@ -2624,4 +2702,4 @@ def is_visible(self, files):
if not attr or "url" not in attr:
return False
- return True
\ No newline at end of file
+ return True
diff --git a/processor.py b/processor.py
index 88972a4..82f744d 100644
--- a/processor.py
+++ b/processor.py
@@ -525,16 +525,27 @@ def handle_thread(api, thread, timeout=120):
def handle_destructive_files(dirs_or_files, ignore_folder=True, timeout=120):
- def handle_thread(thread, timeout=120):
+ """
+ Destruct File(s) from Salesforce org and remove from local disk via Metadata API
+ @param dirs_or_files: lightning directory (bundle) or files
+ @param ignore_folder: ignore the folder itself
+ @param timeout: timeout in seconds
+ @return: None
+ """
+
+ def handle_destruct_thread(thread, timeout=120):
if thread.is_alive():
- sublime.set_timeout(lambda: handle_thread(thread, timeout), timeout)
+ sublime.set_timeout(lambda: handle_destruct_thread(thread, timeout), timeout)
return
# After succeed, remove dirs_or_files and related *-meta.xml from local
if "body" in api.result and api.result["body"]["status"] == "Succeeded":
+ # Remove Component metadata cache
+ util.delete_component_attribute(dirs_or_files)
+
+ # Remove file from local disk and close the related view
win = sublime.active_window()
for _file_or_dir in dirs_or_files:
- # Remove file from local disk and close the related view
view = util.get_view_by_file_name(_file_or_dir)
if view:
win.focus_view(view)
@@ -560,7 +571,7 @@ def handle_thread(thread, timeout=120):
thread = threading.Thread(target=api.deploy, args=(base64_encoded_zip,))
thread.start()
ThreadProgress(api, thread, "Destructing Files", "Destructing Files Succeed")
- handle_thread(thread, timeout)
+ handle_destruct_thread(thread, timeout)
def handle_destructive_package_xml(types, timeout=120):
@@ -578,8 +589,18 @@ def handle_thread(thread, timeout=120):
handle_thread(thread, timeout)
-def handle_deploy_thread(base64_encoded_zip,
- source_org=None, element=None, chosen_classes=[], timeout=120, update_meta=False):
+def handle_deploy_thread(base64_encoded_zip, source_org=None, element=None,
+ chosen_classes=[], timeout=120, update_meta=False):
+ """
+ Deploy code to specified Salesforce org via Metadata API
+ @param base64_encoded_zip: base64-encoded zip of the code content
+ @param source_org: destination Salesforce org
+ @param element: aura element in [Application, Component, Event, Controller, Helper, etc.]
+ @param chosen_classes:
+ @param timeout: timeout in seconds
+ @param update_meta: whether to update component metadata after deployment
+ @return: None
+ """
def handle_thread(thread, timeout=120):
if thread.is_alive():
sublime.set_timeout(lambda: handle_thread(thread, timeout), timeout)
@@ -608,9 +629,9 @@ def handle_thread(thread, timeout=120):
def handle_update_lightning_meta(body, element, timeout=120):
"""
-
+ Update lightning aura/web component metadata via Tooling API after creation
:param body: body data returned from SOAP API
- :param element: Aura Bunlde type in `COMPONENT`, `CONTROLLER`, `HELPER`, `SVG`...
+ :param element: Aura bundle type in `COMPONENT`, `CONTROLLER`, `HELPER`, `SVG`...
:param timeout: timeout in second
:param cmp_type: type
:return:
@@ -640,8 +661,6 @@ def handle_thread(thread, full_name, timeout):
"DefType": record["DefType"]
}
components_dict[bundle_type][full_name.lower()] = cmp_meta
- s.set(username, components_dict)
- sublime.save_settings(context.COMPONENT_METADATA_SETTINGS)
elif bundle_type == "LightningComponentBundle":
# save multiple Lightning Component Resource files
for record in result["records"]:
@@ -655,11 +674,13 @@ def handle_thread(thread, full_name, timeout):
"type": bundle_type
}
components_dict[bundle_type][full_name.lower()] = cmp_meta
+
+ # Save and reload component metadata
+ if result["totalSize"] >= 1:
s.set(username, components_dict)
sublime.save_settings(context.COMPONENT_METADATA_SETTINGS)
-
- # Refresh metadata settings
- sublime.set_timeout(lambda: util.load_metadata_cache(True, settings["username"]), 5)
+ # Refresh metadata settings
+ sublime.set_timeout(lambda: util.load_metadata_cache(True, settings["username"]), 5)
settings = context.get_settings()
username = settings["username"]
@@ -1445,7 +1466,7 @@ def handle_thread(thread, timeout):
# Makedir for subscribed meta types
for metadata_folder in settings["subscribed_metadata_folders"]:
- outputdir = os.path.join(extract_to, "src", metadata_folder);
+ outputdir = os.path.join(extract_to, "src", metadata_folder)
if not os.path.exists(outputdir): os.makedirs(outputdir)
# Extract the zipFile to extract_to
@@ -1604,6 +1625,15 @@ def handle_thread(thread, timeout):
def handle_retrieve_package(types, extract_to, source_org=None, ignore_package_xml=False, timeout=120):
+ """
+ Retrieve package via Metadata API
+ @param types: metadata type dict like {"AuraDefinitionBundle":["aura1", "aura2"]}
+ @param extract_to: target directory (typically settings["workspace"])
+ @param source_org: source Salesforce org
+ @param ignore_package_xml: ignore package xml file
+ @param timeout: timeout in seconds
+ @return: None
+ """
def handle_thread(_thread, timeout):
if _thread.is_alive():
sublime.set_timeout(lambda: handle_thread(_thread, timeout), timeout)
@@ -1619,7 +1649,7 @@ def handle_thread(_thread, timeout):
args=(api.result["zipFile"], extract_to, ignore_package_xml,))
_thread.start()
- # Code Cache
+ # Cache code attributes in component metadata settings for the save-to-server functionality
# print("fileProperties:", api.result.get("fileProperties", None))
if isinstance(api.result.get("fileProperties", None), list):
util.reload_file_attributes(
@@ -1640,7 +1670,7 @@ def handle_thread(_thread, timeout):
def handle_save_to_server(file_name, is_check_only=False, timeout=120):
"""
- Handle Save metadata to Salesforce
+ Handle Save metadata to Salesforce via Tooling API
@param file_name: file name with path format
@param is_check_only: only check the file from Salesforce, do not really save
@param timeout: timeout in seconds
@@ -1675,8 +1705,8 @@ def handle_thread(thread, timeout):
# Backup current file
time_stamp = time.strftime("%Y-%m-%d-%H-%M", time.localtime())
- outputdir = workspace + "/" + component_name + "-" + time_stamp + "-history" + extension
- with open(outputdir, "wb") as fp:
+ output_dir = workspace + "/" + component_name + "-" + time_stamp + "-history" + extension
+ with open(output_dir, "wb") as fp:
fp.write(body.encode("utf-8"))
# Output succeed message in the console
@@ -1709,6 +1739,7 @@ def handle_thread(thread, timeout):
# If not succeed, just go to the error line
# Because error line in page is always at the line 1, so just work in class or trigger
elif "success" in result and not result["success"]:
+ print('error result', result)
# Maybe network issue
_message = "Unknown Problem!"
if "problem" in result:
@@ -1763,6 +1794,7 @@ def handle_thread(thread, timeout):
component_id = component_attribute["id"]
view.run_command("set_check_point", {"mark": component_id + "build_error"})
+ # 1. Get component attribute and body content
component_attribute, component_name = util.get_component_attribute(file_name)
body = open(file_name, encoding="utf-8").read()
@@ -1779,7 +1811,7 @@ def handle_thread(thread, timeout):
if username + file_base_name in globals():
is_thread_alive = globals()[username + file_base_name]
if is_thread_alive:
- print('%s is in process' % file_base_name);
+ print('%s is in process' % file_base_name)
return
# Open panel
@@ -1807,6 +1839,16 @@ def handle_thread(thread, timeout):
def handle_create_component(data, component_name, component_type, markup_or_body, file_name, timeout=120):
+ """
+ Handle create Apex Class/Trigger/Page/Component via Tooling API
+ @param data: component data to create, dict like {"name": "Aclass.cls", "body": content_body}
+ @param component_name: component name without extension
+ @param component_type: component type in [ApexClass, ApexPage, ApexTrigger, ApexComponent]
+ @param markup_or_body: content of the code
+ @param file_name: os file path
+ @param timeout: timeout in second
+ @return: None
+ """
def handle_thread(thread, timeout):
if thread.is_alive():
sublime.set_timeout(lambda: handle_thread(thread, timeout), timeout)
@@ -1850,7 +1892,7 @@ def handle_thread(thread, timeout):
"type": component_type,
"is_test": lower_name.startswith("test") or lower_name.endswith("test")
}
- components_dict[component_type][fullName.lower()] = attributes
+ components_dict[component_type][full_name.lower()] = attributes
s.set(username, components_dict)
# Save settings and show success message
@@ -1886,9 +1928,9 @@ def handle_thread(thread, timeout):
post_url = "/sobjects/" + component_type
thread = threading.Thread(target=api.post, args=(post_url, data,))
thread.start()
- fullName = os.path.basename(file_name)
- ThreadProgress(api, thread, "Creating Component %s" % fullName,
- "Creating Component %s Succeed" % fullName)
+ full_name = os.path.basename(file_name)
+ ThreadProgress(api, thread, "Creating Component %s" % full_name,
+ "Creating Component %s Succeed" % full_name)
handle_thread(thread, timeout)
diff --git a/salesforce/api/tooling.py b/salesforce/api/tooling.py
index ff09e9a..864b627 100644
--- a/salesforce/api/tooling.py
+++ b/salesforce/api/tooling.py
@@ -13,6 +13,7 @@
from ..login import soap_login, rest_login
from ..lib.panel import Printer
+
class ToolingApi():
def __init__(self, settings, **kwargs):
self.settings = settings
@@ -90,7 +91,7 @@ def parse_response(self, res):
try:
response_result = res.json()
if isinstance(response_result, list):
- response_result = response_result[0]
+ response_result = response_result[0]
except:
response_result = {"Error Message": res.text}
response_result["success"] = False
@@ -108,7 +109,7 @@ def parse_response(self, res):
response_result["success"] = True
return response_result
-
+
def head(self, component_url, timeout=120):
""" 'head' request
@@ -124,11 +125,11 @@ def head(self, component_url, timeout=120):
url = self.parse_url(component_url)
try:
- response = requests.head(url, verify=False,
- headers=self.headers, timeout=timeout)
+ response = requests.head(url, verify=False,
+ headers=self.headers, timeout=timeout)
except requests.exceptions.RequestException as e:
self.result = {
- "Error Message": "Network connection timeout when issue REST HEAD request",
+ "Error Message": "Network connection timeout when issue REST HEAD request",
"success": False
}
return self.result
@@ -140,7 +141,7 @@ def head(self, component_url, timeout=120):
self.result = result
return self.result
return self.head(component_url)
-
+
result = self.parse_response(response)
self.result = result
@@ -165,11 +166,11 @@ def get(self, component_url, timeout=120):
headers["Accept-Encoding"] = 'identity, deflate, compress, gzip'
try:
- response = requests.get(url, data=None, verify=False,
- headers=self.headers, timeout=timeout)
+ response = requests.get(url, data=None, verify=False,
+ headers=self.headers, timeout=timeout)
except requests.exceptions.RequestException as e:
self.result = {
- "Error Message": "Network connection timeout when issuing REST GET request",
+ "Error Message": "Network connection timeout when issuing REST GET request",
"success": False
}
return self.result
@@ -181,7 +182,7 @@ def get(self, component_url, timeout=120):
self.result = result
return self.result
return self.get(component_url)
-
+
result = self.parse_response(response)
self.result = result
@@ -203,13 +204,13 @@ def put(self, put_url, data, timeout=120):
return self.result
url = self.parse_url(put_url)
-
+
try:
- response = requests.put(url, data=json.dumps(data), verify=False,
- headers=self.headers, timeout=timeout)
+ response = requests.put(url, data=json.dumps(data), verify=False,
+ headers=self.headers, timeout=timeout)
except requests.exceptions.RequestException as e:
self.result = {
- "Error Message": "Network Network connection timeout when issuing REST PUT request",
+ "Error Message": "Network Network connection timeout when issuing REST PUT request",
"success": False
}
return self.result
@@ -221,7 +222,7 @@ def put(self, put_url, data, timeout=120):
self.result = result
return self.result
return self.put(put_url, data)
-
+
result = self.parse_response(response)
self.result = result
@@ -243,13 +244,13 @@ def patch(self, patch_url, data, timeout=120):
return self.result
url = self.parse_url(patch_url)
-
+
try:
- response = requests.patch(url, data=json.dumps(data), verify=False,
- headers=self.headers, timeout=timeout)
+ response = requests.patch(url, data=json.dumps(data), verify=False,
+ headers=self.headers, timeout=timeout)
except requests.exceptions.RequestException as e:
self.result = {
- "Error Message": "Network connection timeout when issuing REST PATCH request",
+ "Error Message": "Network connection timeout when issuing REST PATCH request",
"success": False
}
return self.result
@@ -261,7 +262,7 @@ def patch(self, patch_url, data, timeout=120):
self.result = result
return self.result
return self.patch(patch_url, data)
-
+
result = self.parse_response(response)
self.result = result
@@ -283,13 +284,13 @@ def post(self, post_url, data, timeout=120):
return self.result
url = self.parse_url(post_url)
-
+
try:
- response = requests.post(url, data=json.dumps(data), verify=False,
- headers=self.headers, timeout=timeout)
+ response = requests.post(url, data=json.dumps(data), verify=False,
+ headers=self.headers, timeout=timeout)
except requests.exceptions.RequestException as e:
self.result = {
- "Error Message": "Network connection timeout when issuing REST POST request",
+ "Error Message": "Network connection timeout when issuing REST POST request",
"success": False
}
return self.result
@@ -301,7 +302,7 @@ def post(self, post_url, data, timeout=120):
self.result = result
return self.result
return self.post(post_url, data)
-
+
result = self.parse_response(response)
self.result = result
@@ -322,13 +323,13 @@ def delete(self, component_url, timeout=120):
return self.result
url = self.parse_url(component_url)
-
+
try:
- response = requests.delete(url, data=None, verify=False,
- headers=self.headers, timeout=timeout)
+ response = requests.delete(url, data=None, verify=False,
+ headers=self.headers, timeout=timeout)
except requests.exceptions.RequestException as e:
self.result = {
- "Error Message": "Network connection timeout when issuing REST DELETE request",
+ "Error Message": "Network connection timeout when issuing REST DELETE request",
"success": False
}
return self.result
@@ -364,13 +365,13 @@ def search(self, sosl, timeout=120):
return self.result
url = self.base_url + "/search"
- params = {'q' : sosl}
+ params = {'q': sosl}
try:
- response = requests.get(url, headers=self.headers, verify=False,
- params=params, timeout=timeout)
+ response = requests.get(url, headers=self.headers, verify=False,
+ params=params, timeout=timeout)
except requests.exceptions.RequestException as e:
self.result = {
- "Error Message": "Network connection timeout when issuing REST SEARCH request",
+ "Error Message": "Network connection timeout when issuing REST SEARCH request",
"success": False
}
return self.result
@@ -402,11 +403,11 @@ def quick_search(self, sosl_string, timeout=120):
# We need to escape this special character,
# Don't know why ?, _
for ch in ["-", "?", "*"]:
- sosl_string = sosl_string.replace(ch, "\\"+ch)
+ sosl_string = sosl_string.replace(ch, "\\" + ch)
sosl_string = 'FIND {%s}' % sosl_string
result = self.search(sosl_string)
-
+
self.result = result
return self.result
@@ -439,14 +440,14 @@ def query(self, soql, is_toolingapi=False, timeout=120):
# Just API 28 above support CustomField
url = self.base_url + ("/tooling" if is_toolingapi else "") + "/query"
- params = {'q' : soql}
+ params = {'q': soql}
try:
response = requests.get(url, data=None, verify=False, params=params,
- headers=self.headers, timeout=timeout)
+ headers=self.headers, timeout=timeout)
except requests.exceptions.RequestException as e:
self.result = {
- "Error Message": "Network connection timeout when issuing QUERY request",
+ "Error Message": "Network connection timeout when issuing QUERY request",
"success": False
}
return self.result
@@ -484,7 +485,7 @@ def get_all_result(previous_result):
if not self.login(): return
result = self.query(soql, is_toolingapi)
-
+
# Database.com not support ApexComponent
if not result["success"]:
self.result = result
@@ -513,27 +514,27 @@ def query_symbol_table(self, split=30):
if not result["success"]:
self.result = result
return self.result
-
+
offset = 0
result = {"totalSize": 0, "records": [], "success": result["success"]}
describe_metadata = util.get_described_metadata(self.settings)
namespacePrefix = describe_metadata.get("organizationNamespace", '')
- soql = "SELECT NamespacePrefix, SymbolTable, Name FROM ApexClass " +\
- "WHERE NamespacePrefix = %s " % (
+ soql = "SELECT NamespacePrefix, SymbolTable, Name FROM ApexClass " + \
+ "WHERE NamespacePrefix = %s " % (
"'%s'" % namespacePrefix if namespacePrefix else 'null'
- )
+ )
while True:
query = soql + "LIMIT %s OFFSET %s""" % (split, offset)
previous_result = self.query(query, is_toolingapi=True)
- if not previous_result["success"]: continue # Ignore exception
- if previous_result["size"] == 0: break # No result
+ if not previous_result["success"]: continue # Ignore exception
+ if previous_result["size"] == 0: break # No result
if self.settings["debug_mode"]:
- print ('SOQL: %s, ' % query, 'totalSize: %s' % previous_result["size"])
+ print('SOQL: %s, ' % query, 'totalSize: %s' % previous_result["size"])
result['totalSize'] += previous_result['totalSize']
previous_result['records'].extend(result['records'])
result['records'] = previous_result['records']
offset += split
-
+
# Invoke for thread
self.result = result
@@ -553,9 +554,9 @@ def query_logs(self, last_n_logs, user_id=None):
if not user_id: user_id = self.session["user_id"]
# Query self logs
- soql = "SELECT Id,LogUserId,LogLength,Request,Operation,Application," +\
- "Status,DurationMilliseconds,StartTime,Location FROM ApexLog " +\
- "WHERE LogUserId='%s' ORDER BY StartTime DESC LIMIT %s" % (user_id, last_n_logs)
+ soql = "SELECT Id,LogUserId,LogLength,Request,Operation,Application," + \
+ "Status,DurationMilliseconds,StartTime,Location FROM ApexLog " + \
+ "WHERE LogUserId='%s' ORDER BY StartTime DESC LIMIT %s" % (user_id, last_n_logs)
self.result = self.query(soql, is_toolingapi=True)
return self.result
@@ -572,12 +573,12 @@ def combine_soql(self, sobject, action=None, contains_compound=True):
self.result = result
return self.result
- fields = sorted(result["fields"], key=lambda k : k['custom'])
+ fields = sorted(result["fields"], key=lambda k: k['custom'])
field_list = []
for field in fields:
# http://www.salesforce.com/us/developer/docs/api/Content/compound_fields_address.htm
if not contains_compound and field.get("queryByDistance"): continue
- if not action or field[action]:
+ if not action or field[action]:
field_list.append(field.get("name"))
# Id must be included in the field list
@@ -721,7 +722,7 @@ def create_trace_flag(self, traced_entity_id=None):
# Check whether traced user already has trace flag
# If not, just create it for him/her
- query = "SELECT Id, ExpirationDate FROM TraceFlag " +\
+ query = "SELECT Id, ExpirationDate FROM TraceFlag " + \
"WHERE TracedEntityId = '%s'" % (traced_entity_id)
result = self.query(query, True)
@@ -734,7 +735,7 @@ def create_trace_flag(self, traced_entity_id=None):
if result["totalSize"] > 0:
self.delete("/tooling/sobjects/TraceFlag/" + result["records"][0]["Id"])
return self.create_trace_flag(traced_entity_id)
-
+
# Start to create Trace Flag
trace_flag = self.settings["trace_flag"]
@@ -744,7 +745,7 @@ def create_trace_flag(self, traced_entity_id=None):
if not debug_level["success"]:
self.result = debug_level
return self.result
-
+
trace_flag["LogType"] = "USER_DEBUG"
trace_flag["DebugLevelId"] = debug_level["id"]
@@ -766,13 +767,13 @@ def create_trace_flag(self, traced_entity_id=None):
def get_debug_level(self, name="haoide"):
debug_levels = self.query(
- "SELECT Id FROM DebugLevel WHERE DeveloperName = '%s'" % name,
+ "SELECT Id FROM DebugLevel WHERE DeveloperName = '%s'" % name,
is_toolingapi=True
)
if debug_levels["success"]:
if debug_levels["totalSize"] > 0:
debug_level = debug_levels["records"][0]
- debug_level["id"] = debug_level["Id"] # Prevent keyError problem
+ debug_level["id"] = debug_level["Id"] # Prevent keyError problem
debug_level["success"] = True
return debug_level
@@ -806,7 +807,7 @@ def retrieve_body(self, retrieve_url, timeout=120):
response.encoding = "UTF-8"
except requests.exceptions.RequestException as e:
self.result = {
- "Error Message": "Network connection timeout when issuing RETRIVING BODY request",
+ "Error Message": "Network connection timeout when issuing RETRIVING BODY request",
"success": False
}
return self.result
@@ -818,7 +819,7 @@ def retrieve_body(self, retrieve_url, timeout=120):
result = self.parse_response(response)
self.result = result
-
+
return result
def run_tests_asynchronous(self, class_ids):
@@ -841,9 +842,9 @@ def run_tests_synchronous(self, class_name, test_names):
* test_names -- Apex Test Method Name List
"""
if test_names and len(test_names) > 0:
- data = {"tests":[{"className":class_name,"testMethods":test_names}]}
+ data = {"tests": [{"className": class_name, "testMethods": test_names}]}
else:
- data = {"tests":[{"className":class_name}]}
+ data = {"tests": [{"className": class_name}]}
self.result = self.post("/tooling/runTestsSynchronous/", data)
return self.result
@@ -872,12 +873,12 @@ def run_test(self, class_id):
time.sleep(2)
data = {"ApexClassId": class_id}
result = self.post("/sobjects/ApexTestQueueItem", data)
-
+
# Exception Process
if not result["success"]:
self.result = result
return self.result
-
+
# Wait for the ApexTestQueueItem is over
time.sleep(5)
queue_item_id = result["id"]
@@ -888,7 +889,7 @@ def run_test(self, class_id):
if not result["success"]:
self.result = result
return self.result
-
+
# If totalSize is Zero, it means we need to wait until test is finished
while result["totalSize"] == 0 or result["records"][0]["Status"] in ["Queued", "Processing"]:
time.sleep(2)
@@ -898,7 +899,7 @@ def run_test(self, class_id):
AsyncApexJobId,Id,Message,MethodName,Outcome,QueueItemId,StackTrace,
TestTimestamp FROM ApexTestResult WHERE QueueItemId = '%s'""" % queue_item_id
- # After Test is finished, get result
+ # After Test is finished, get result
result = self.query(test_result_soql)
# Exception Process
@@ -907,7 +908,7 @@ def run_test(self, class_id):
return self.result
result = result["records"]
-
+
# Combine these two result
self.result = result
@@ -927,9 +928,10 @@ def generate_workbook(self, sobjects):
return result
workspace = self.settings.get("workspace")
- outputdir = util.generate_workbook(result, workspace,
- self.settings.get("workbook_field_describe_columns"))+"/"+sobject+".csv"
- print (sobject + " workbook outputdir: " + outputdir)
+ outputdir = util.generate_workbook(result, workspace,
+ self.settings.get(
+ "workbook_field_describe_columns")) + "/" + sobject + ".csv"
+ print(sobject + " workbook outputdir: " + outputdir)
def save_to_server(self, component_attribute, body, is_check_only, check_save_conflict=True):
""" This method contains 5 steps:
@@ -962,8 +964,8 @@ def save_to_server(self, component_attribute, body, is_check_only, check_save_co
if self.settings["check_save_conflict"] and not is_check_only and check_save_conflict:
Printer.get('log').write("Start to check saving conflict")
- query = "SELECT Id, LastModifiedById, LastModifiedBy.Id, " +\
- "LastModifiedBy.Name, LastModifiedDate, SystemModstamp " +\
+ query = "SELECT Id, LastModifiedById, LastModifiedBy.Id, " + \
+ "LastModifiedBy.Name, LastModifiedDate, SystemModstamp " + \
"FROM %s WHERE Id = '%s'" % (component_type, component_id)
result = self.query(query, True)
@@ -983,13 +985,13 @@ def save_to_server(self, component_attribute, body, is_check_only, check_save_co
# If local date is different with server date or lastModifiedBy is not you,
# it means there has conflict
- lastModifiedByYou = class_attr["LastModifiedById"] == self.session["user_id"]
- timeStampMatch = serverDateLiteral[:19] == localDateLiteral[:19]
- if not lastModifiedByYou or not timeStampMatch:
+ last_modified_by_me = class_attr["LastModifiedById"] == self.session["user_id"]
+ time_stamp_match = serverDateLiteral[:19] == localDateLiteral[:19]
+ if not last_modified_by_me or not time_stamp_match:
# Used for debug
if self.settings["debug_mode"]:
- print ("localDateLiteral: " + localDateLiteral)
- print ("serverDateLiteral: " + serverDateLiteral)
+ print("localDateLiteral: " + localDateLiteral)
+ print("serverDateLiteral: " + serverDateLiteral)
message = "Modified by %s at %s, continue?" % (
lastModifiedBy["Name"], serverLastModifiedDateZone
@@ -1007,8 +1009,8 @@ def save_to_server(self, component_attribute, body, is_check_only, check_save_co
# Get MetadataContainerId
Printer.get('log').write("Start to fetch MetadataContainerId")
- data = {
- "name": "Save" + component_type[4 : len(component_type)] + component_id
+ data = {
+ "name": "Save" + component_type[4: len(component_type)] + component_id
}
container_url = "/tooling/sobjects/MetadataContainer"
result = self.post(container_url, data)
@@ -1024,11 +1026,11 @@ def save_to_server(self, component_attribute, body, is_check_only, check_save_co
container_id = error_message[error_message.rindex("1dc"): len(error_message)]
delete_result = self.delete(container_url + "/" + container_id)
if delete_result["success"]:
- sublime.set_timeout(lambda:sublime.status_message("container_id is deleted."), 10)
+ sublime.set_timeout(lambda: sublime.status_message("container_id is deleted."), 10)
else:
self.result = delete_result
return delete_result
-
+
# We can't reuse the container_id which caused error
# Post Request to get MetadataContainerId
return self.save_to_server(component_attribute, body, is_check_only, False)
@@ -1036,7 +1038,7 @@ def save_to_server(self, component_attribute, body, is_check_only, check_save_co
self.result = result
return self.result
- # Post ApexComponentMember
+ # Save the code to server by posting Apex[Class/Page/Component]Member
data = {
"ContentEntityId": component_id,
"MetadataContainerId": container_id,
@@ -1053,13 +1055,13 @@ def save_to_server(self, component_attribute, body, is_check_only, check_save_co
}
url = "/tooling/sobjects/" + component_type + "Member"
member_result = self.post(url, data)
-
+
# Check whether user has privilege of `Author Apex`
if "errorCode" in member_result and member_result["errorCode"] == "NOT_FOUND":
# Before return error to console, we need to delete the container_id
# If delete failed, next saving operation will handle it
sublime.set_timeout_async(self.delete(container_url + "/" + container_id), 100)
-
+
self.result = {
"success": False,
"Error Message": "You don't have privilege on 'Author Apex'"
@@ -1087,7 +1089,7 @@ def save_to_server(self, component_attribute, body, is_check_only, check_save_co
Printer.get('log').write("ContainerAsyncRequest is in Queued, waiting...")
result = self.get(sync_request_url + "/" + request_id)
state = result["State"]
-
+
return_result = {
"lastModifiedDate": result["LastModifiedDate"]
}
@@ -1114,7 +1116,7 @@ def save_to_server(self, component_attribute, body, is_check_only, check_save_co
else:
compile_errors = unescape(result["CompilerErrors"])
compile_errors = json.loads(compile_errors)
-
+
return_result = {}
if len(compile_errors) > 0:
return_result = compile_errors[0]
@@ -1132,7 +1134,7 @@ def save_to_server(self, component_attribute, body, is_check_only, check_save_co
problem = "\n".join(problem)
return_result["problem"] = urllib.parse.unquote(
unescape(problem, {
- "'": "'",
+ "'": "'",
""": '"'
})
)
@@ -1148,8 +1150,8 @@ def save_to_server(self, component_attribute, body, is_check_only, check_save_co
if "columnNumber" not in return_result:
return_result["columnNumber"] = 0
- return_result["success"] = False
-
+ return_result["success"] = False
+
if return_result["success"] and component_type == "ApexClass":
sublime.set_timeout_async(self.write_symbol_table_cache(member_result["id"]), 5)
@@ -1162,7 +1164,7 @@ def save_to_server(self, component_attribute, body, is_check_only, check_save_co
def save_lightning_to_server(self, component_attribute, body, check_save_conflict=True):
"""
Save Lightning AuraDefinition or LightningComponentResource such as component makeup, controller and helper or
- Lightning Web component teampl to Salesforce
+ Lightning Web component resource to Salesforce
Arguments:
@param component_attribute: attribute of component, e.g., component id, url
@@ -1180,16 +1182,16 @@ def handle_error_message(result):
try:
if "\n" in result["message"]:
error_messages = result["message"].split('\n')
- if "CSS Parser" in error_messages[0] :
+ if "CSS Parser" in error_messages[0]:
error_msg = error_messages[2]
_result["problem"] = error_msg
- error_line_info = error_msg[error_msg.find("(")+1: error_msg.find(")")]
+ error_line_info = error_msg[error_msg.find("(") + 1: error_msg.find(")")]
_result["lineNumber"] = error_line_info.split(",")[0][5:]
_result["columnNumber"] = error_line_info.split(",")[1][5:]
else:
error_base_info = error_messages[0].split(': ')
error_line_info = error_base_info[1].split(':')[1]
- error_line_info = error_line_info[1 : len(error_line_info) - 1]
+ error_line_info = error_line_info[1: len(error_line_info) - 1]
_result['id'] = error_base_info[0]
_result["lineNumber"] = error_line_info.split(',')[0]
_result["columnNumber"] = error_line_info.split(',')[1]
@@ -1200,7 +1202,7 @@ def handle_error_message(result):
m = re.search(r'\[\d+,\s*\d+\]', result["message"])
if m:
col_row = m.group(0)
- col_row = col_row[1:len(col_row)-1]
+ col_row = col_row[1:len(col_row) - 1]
_result["lineNumber"] = col_row.split(',')[0]
_result["columnNumber"] = col_row.split(',')[1]
except Exception as _ex:
@@ -1291,7 +1293,7 @@ def handle_error_message(result):
def write_symbol_table_cache(self, member_id):
# Get the symbol table from ApexClassMember
- query = "SELECT Id, SymbolTable " +\
+ query = "SELECT Id, SymbolTable " + \
"FROM ApexClassMember WHERE Id ='%s'" % member_id
member = self.query(query, True)
@@ -1307,7 +1309,7 @@ def write_symbol_table_cache(self, member_id):
outer = util.parse_symbol_table(symbol_table)
symboltable_dict[symbol_table["name"].lower()] = {
- "outer" : outer,
+ "outer": outer,
"name": symbol_table["name"]
}
@@ -1319,4 +1321,4 @@ def write_symbol_table_cache(self, member_id):
symboltable_dict[symbol_table["name"].lower()]["inners"] = inners
symbol_table_cache.set(self.settings["username"], symboltable_dict)
- sublime.save_settings("symbol_table.sublime-settings")
\ No newline at end of file
+ sublime.save_settings("symbol_table.sublime-settings")
diff --git a/salesforce/lib/apex.py b/salesforce/lib/apex.py
index d51c883..aa09a4a 100644
--- a/salesforce/lib/apex.py
+++ b/salesforce/lib/apex.py
@@ -697,6 +697,7 @@
"Separator"
],
"System": [
+ "AccessType",
"XmlException",
"RequiredFeatureMissingException",
"SearchException",
@@ -737,7 +738,8 @@
"ApexPages",
"Approval",
"QuickAction",
- "SObject",
+ "SObject",
+ "SObjectAccessDecision",
"AssertException",
"Date",
"Messaging",
@@ -829,7 +831,8 @@
"QueueableContextImpl",
"Site",
"UserManagement",
- "Callable"
+ "Callable",
+ "Security",
],
"Canvas": [
"CanvasRenderException",
@@ -864,14 +867,15 @@
"EmailFileAttachment",
"AttachmentRetrievalOption",
"InboundEmailResult",
- "InboundEmail",
- "RenderEmailTemplateError",
+ "InboundEmail",
"EmailToSalesforceHandler",
"SingleEmailMessage",
"TextAttachment",
"PushNotification",
- "PushNotificationPayload",
- "RenderEmailTemplateBodyResult",
+ "PushNotificationPayload",
+ "CustomNotification",
+ "RenderEmailTemplateBodyResult",
+ "RenderEmailTemplateError",
"MassEmailMessage"
],
"DataProtection": [
@@ -3082,7 +3086,25 @@
},
"namespace": "Messaging",
"properties": {}
- },
+ },
+ "customnotification": {
+ "constructors": {
+ "CustomNotification()\t": "CustomNotification()$0",
+ "CustomNotification(String typeId, String sender, String title, String body, String targetId, String targetPageRef)\t": "CustomNotification(${1:String typeId}, ${2:String sender}, ${3:String title}, ${4:String body}, ${5:String targetId}, ${6:String targetPageRef})$0"
+ },
+ "name": "CustomNotification",
+ "methods": {
+ "send(Set users)\tvoid": "send(${1:Set users})$0",
+ "setNotificationTypeId(String id)\tvoid": "setNotificationTypeId(${1:String id})$0",
+ "setTitle(String title)\tvoid": "setTitle(${1:String title})$0",
+ "setBody(String body)\tvoid": "setBody(${1:String body})$0",
+ "setSenderId(String id)\tvoid": "setSenderId(${1:String id})$0",
+ "setTargetId(String targetId)\tvoid": "setTargetId(${1:String targetId})$0",
+ "setTargetPageRef(String pageRef)\tvoid": "setTargetPageRef(${1:String pageRef})$0",
+ },
+ "namespace": "Messaging",
+ "properties": {}
+ },
"sortorder": [
{
"constructors": {},
@@ -9658,7 +9680,13 @@
"getLimitFieldsDescribes()\tInteger": "getLimitFieldsDescribes()$0",
"getChildRelationshipsDescribes()\tInteger": "getChildRelationshipsDescribes()$0",
"getHeapSize()\tInteger": "getHeapSize()$0",
- "getPickListDescribes()\tInteger": "getPickListDescribes()$0"
+ "getPickListDescribes()\tInteger": "getPickListDescribes()$0",
+ "getMobilePushApexCalls()\tInteger": "getMobilePushApexCalls()$0",
+ "getLimitMobilePushApexCalls()\tInteger": "getLimitMobilePushApexCalls()$0",
+ "getPublishImmediateDML()\tInteger": "getPublishImmediateDML()$0",
+ "getLimitPublishImmediateDML()\tInteger": "getLimitPublishImmediateDML()$0",
+ "getQueueableJobs()\tInteger": "getQueueableJobs()$0",
+ "getLimitQueueableJobs()\tInteger": "getLimitQueueableJobs()$0",
},
"namespace": "Limits",
"name": "Limits"
@@ -23310,5 +23338,38 @@
"properties": {
"fileCount": "fileCount$0"
}
- }
-}
\ No newline at end of file
+ },
+ "security": {
+ "constructors": {},
+ "name": "Security",
+ "methods": {
+ "stripInaccessible(System.AccessType accessCheckType, List sourceRecords, Boolean enforceRootObjectCRUD)\tSystem.SObjectAccessDecision": "stripInaccessible(${1:System.AccessType accessCheckType}, ${2:List sourceRecords}, ${3:Boolean enforceRootObjectCRUD})$0",
+ "stripInaccessible(System.AccessType accessCheckType, List sourceRecords)\tSystem.SObjectAccessDecision": "stripInaccessible(${1:System.AccessType accessCheckType}, ${2:List sourceRecords})$0",
+ },
+ "namespace": "System",
+ "properties": {}
+ },
+ "accesstype": {
+ "constructors": {},
+ "name": "AccessType",
+ "methods": {},
+ "namespace": "System",
+ "properties": {
+ "CREATABLE": "CREATABLE$0",
+ "READABLE": "READABLE$0",
+ "UPDATABLE": "UPDATABLE$0",
+ "UPSERTABLE": "UPSERTABLE$0",
+ }
+ },
+ "sobjectaccessdecision": {
+ "constructors": {},
+ "name": "SObjectAccessDecision",
+ "methods": {
+ "getModifiedIndexes()\tSet": "getModifiedIndexes()$0",
+ "getRecords()\tList": "getRecords()$0",
+ "getRemovedFields()\tMap>": "getRemovedFields()$0",
+ },
+ "namespace": "System",
+ "properties": {}
+ },
+}
diff --git a/util.py b/util.py
index 44cfd94..fdb2d90 100644
--- a/util.py
+++ b/util.py
@@ -32,7 +32,7 @@ def load_templates():
"""
settings = context.get_settings()
target_dir = os.path.join(settings["workspace"], ".templates")
- if not os.path.exists(target_dir):
+ if not os.path.exists(target_dir):
os.makedirs(target_dir)
templates_dir = os.path.join(target_dir, "templates.json")
@@ -43,7 +43,7 @@ def load_templates():
if not os.path.isfile(templates_dir) or not os.path.exists(lwc_dir) or not os.path.exists(lwc_ele_dir):
# get the installed haoide package directory
source_dir = os.path.join(
- sublime.installed_packages_path(),
+ sublime.installed_packages_path(),
"haoide.sublime-package"
)
@@ -87,19 +87,19 @@ def copy_files_in_folder(source_dir, target_dir):
@target_dir -- Target Directory
"""
- for _file in os.listdir(source_dir):
- sourceFile = os.path.join(source_dir, _file)
- targetFile = os.path.join(target_dir, _file)
+ for _file in os.listdir(source_dir):
+ sourceFile = os.path.join(source_dir, _file)
+ targetFile = os.path.join(target_dir, _file)
- if os.path.isfile(sourceFile):
- if not os.path.exists(target_dir):
- os.makedirs(target_dir)
+ if os.path.isfile(sourceFile):
+ if not os.path.exists(target_dir):
+ os.makedirs(target_dir)
if not os.path.exists(targetFile) or (
os.path.exists(targetFile) and (
- os.path.getsize(targetFile) != os.path.getsize(sourceFile)
- )):
- open(targetFile, "wb").write(open(sourceFile, "rb").read())
- if os.path.isdir(sourceFile):
+ os.path.getsize(targetFile) != os.path.getsize(sourceFile)
+ )):
+ open(targetFile, "wb").write(open(sourceFile, "rb").read())
+ if os.path.isdir(sourceFile):
copy_files_in_folder(sourceFile, targetFile)
@@ -130,7 +130,7 @@ def copy_files(attributes, target_dir):
# Build target file
target_file = os.path.join(
- target_meta_folder,
+ target_meta_folder,
attribute["fullName"]
)
@@ -160,8 +160,8 @@ def copy_files(attributes, target_dir):
def get_described_metadata(settings):
cache_file = os.path.join(
- settings["workspace"],
- ".config",
+ settings["workspace"],
+ ".config",
"metadata.json"
)
@@ -222,6 +222,7 @@ def get_package_info(settings):
return package
+
def get_completion_from_cache(settings, component_type, is_lightning=False):
""" Get component completion list from .config/package.json
@@ -238,7 +239,7 @@ def get_completion_from_cache(settings, component_type, is_lightning=False):
members = package_cache.get(component_type, [])
for member in members:
completion_list.append((
- "%s%s\t%s" % (namespace, member.get("fullName"), component_type),
+ "%s%s\t%s" % (namespace, member.get("fullName"), component_type),
"%s%s" % (namespace, member.get("fullName"))
))
else:
@@ -246,6 +247,7 @@ def get_completion_from_cache(settings, component_type, is_lightning=False):
return completion_list
+
def view_coverage(file_name, record, body):
"""
View Apex Class/Trigger code coverage like developer console UI
@@ -322,7 +324,7 @@ def local_datetime(server_datetime_str):
* local_datetime -- local datetime with GMT offset
"""
-
+
offset = get_local_timezone_offset()
local_datetime = datetime.datetime.strptime(server_datetime_str[:19], '%Y-%m-%dT%H:%M:%S')
local_datetime += datetime.timedelta(hours=offset)
@@ -365,7 +367,7 @@ def populate_all_components():
component_id = component_attributes[key]["id"]
component_type = component_attributes[key]["type"]
component_name = component_attributes[key]["name"]
- return_component_attributes[component_type+"."+component_name] = component_id
+ return_component_attributes[component_type + "." + component_name] = component_id
return return_component_attributes
@@ -462,7 +464,7 @@ def set_component_attribute(attributes, lastModifiedDate):
* attributes -- component attributes
* lastModifiedDate -- LastModifiedDate of component
"""
-
+
# If sobjects is exist in local cache, just return it
settings = context.get_settings()
username = settings["username"]
@@ -475,13 +477,14 @@ def set_component_attribute(attributes, lastModifiedDate):
components_dict = s.get(username, {})
# Prevent exception if no component in org
- if _type not in components_dict:
- components_dict = {_type : {}}
+ if _type not in components_dict:
+ components_dict = {_type: {}}
- # Build components dict
- attr = components_dict[_type][fullName.lower()]
+ # update components dict lastModifiedDate attribute
+ attr = components_dict[_type][fullName.lower()]
attr["lastModifiedDate"] = lastModifiedDate
- components_dict[_type][fullName.lower()] = attr
+ # Commented out unnecessary assignment
+ # components_dict[_type][fullName.lower()] = attr
# Save settings and show success message
s.set(username, components_dict)
@@ -556,8 +559,8 @@ def get_symbol_tables(username):
def get_sobject_completion_list(
- sobject_describe,
- prefix="",
+ sobject_describe,
+ prefix="",
display_fields=True,
display_parent_relationships=True,
display_child_relationships=True):
@@ -584,9 +587,9 @@ def get_sobject_completion_list(
if display_parent_relationships:
for key in sorted(sobject_describe["parentRelationships"]):
parent_sobject = sobject_describe["parentRelationships"][key]
- completion_list.append((prefix + key + "\t" + parent_sobject + "(c2p)", key))
+ completion_list.append((prefix + key + "\t" + parent_sobject + "(c2p)", key))
- # Child Relationship Describe
+ # Child Relationship Describe
if display_child_relationships:
for key in sorted(sobject_describe["childRelationships"]):
child_sobject = sobject_describe["childRelationships"][key]
@@ -624,7 +627,7 @@ def get_component_completion(username, component_type, tag_has_ending=False):
value = "c:%s%s" % (component_name, "" if tag_has_ending else "$1>")
completion_list.append((display, value))
else:
- completion_list.append((component_name+"\t"+component_type, component_name))
+ completion_list.append((component_name + "\t" + component_type, component_name))
return completion_list
@@ -634,7 +637,7 @@ def get_component_attributes(settings, component_name, is_lightning=False):
"aura", component_name, component_name + ".cmp")
else:
component_dir = os.path.join(settings["workspace"], "src",
- "components", component_name+".component")
+ "components", component_name + ".component")
attributes = []
if os.path.isfile(component_dir):
name, _type, description = "", "", ""
@@ -671,12 +674,13 @@ def get_component_attributes(settings, component_name, is_lightning=False):
return attributes
+
def get_attribute_completion(settings, component_name, is_lightning=False):
completion_list = []
for attribute in get_component_attributes(settings, component_name, is_lightning):
display = "%s\t%s(%s)" % (
- attribute["name"],
- attribute["description"],
+ attribute["name"],
+ attribute["description"],
attribute["type"].capitalize()
)
value = '%s="$1"$0' % attribute["name"]
@@ -684,6 +688,7 @@ def get_attribute_completion(settings, component_name, is_lightning=False):
return completion_list
+
def convert_15_to_18(the15Id):
""" Convert Salesforce 15 Id to 18 Id
@@ -695,21 +700,21 @@ def convert_15_to_18(the15Id):
* 18 Id - converted 18 Id
"""
-
+
if not the15Id or len(the15Id) != 15: return the15Id
cmap = {
"00000": "A", "00001": "B", "00010": "C", "00011": "D", "00100": "E",
"00101": "F", "00110": "G", "00111": "H", "01000": "I", "01001": "J",
- "01010": "K", "01011": "L", "01100": "M", "01101": "N", "01110": "O",
- "01111": "P", "10000": "Q", "10001": "R", "10010": "S", "10011": "T",
- "10100": "U", "10101": "V", "10110": "W", "10111": "X", "11000": "Y",
- "11001": "Z", "11010": "0", "11011": "1", "11100": "2", "11101": "3",
+ "01010": "K", "01011": "L", "01100": "M", "01101": "N", "01110": "O",
+ "01111": "P", "10000": "Q", "10001": "R", "10010": "S", "10011": "T",
+ "10100": "U", "10101": "V", "10110": "W", "10111": "X", "11000": "Y",
+ "11001": "Z", "11010": "0", "11011": "1", "11100": "2", "11101": "3",
"11110": "4", "11111": "5"
}
chars = [cmap["".join(["1" if c.isupper() else "0" for c in char[::-1]])] \
- for char in list_chunks(the15Id, 5)]
+ for char in list_chunks(the15Id, 5)]
return the15Id + "".join(chars)
@@ -723,7 +728,7 @@ def list_chunks(l, n):
* n - split size
"""
for i in range(0, len(l), n):
- yield l[i:i+n]
+ yield l[i:i + n]
def dict_chunks(data, SIZE=10000):
@@ -793,7 +798,7 @@ def get_variable_type(view, pt, pattern):
# Get the matched variable type
matched_regions = view.find_all(pattern, sublime.IGNORECASE)
uncomment_regions = remove_comments(view, matched_regions)
-
+
# Three scenarios:
# 1. If no matched regions
# 2. Only one matched region
@@ -803,7 +808,7 @@ def get_variable_type(view, pt, pattern):
elif len(uncomment_regions) == 1:
matched_region = uncomment_regions[0]
else:
- row_region = {} # Row => Region
+ row_region = {} # Row => Region
for mr in uncomment_regions:
row, col = view.rowcol(mr.begin())
row_region[row] = mr
@@ -823,7 +828,7 @@ def get_variable_type(view, pt, pattern):
# Get the content of matched region
matched_str = view.substr(matched_region).strip()
-
+
# If list, map, set
if "<" in matched_str and ">" in matched_str:
variable_type = matched_str.split("<")[0].strip()
@@ -860,7 +865,7 @@ def get_soql_match_region(view, pt):
matched_region = m
break
- if not matched_region:
+ if not matched_region:
return (matched_region, is_between_start_and_from, sobject_name)
match_str = view.substr(matched_region)
@@ -870,7 +875,7 @@ def get_soql_match_region(view, pt):
if pt >= (select_pos + match_begin) and pt <= (from_pos + match_begin):
is_between_start_and_from = True
- sobject_name = match_str[from_pos+5:]
+ sobject_name = match_str[from_pos + 5:]
sobject_name = sobject_name.strip()
return (matched_region, is_between_start_and_from, sobject_name)
@@ -885,7 +890,7 @@ def parse_symbol_table(symbol_table):
"""
completions = {}
- if not symbol_table:
+ if not symbol_table:
return completions;
for c in symbol_table.get('constructors', []):
@@ -896,37 +901,37 @@ def parse_symbol_table(symbol_table):
params.append(p["type"].capitalize() + " " + p["name"])
paramStrings = []
for i, p in enumerate(params):
- paramStrings.append("${"+str(i+1)+":"+params[i]+"}")
+ paramStrings.append("${" + str(i + 1) + ":" + params[i] + "}")
paramString = ", ".join(paramStrings)
- completions[modifiers+" "+c["name"]+"("+", ".join(params)+")"] =\
+ completions[modifiers + " " + c["name"] + "(" + ", ".join(params) + ")"] = \
"%s(%s)" % (c["name"], paramString)
else:
- completions[modifiers+" "+c["name"]+"()"] = c["name"]+"()${1:}"
+ completions[modifiers + " " + c["name"] + "()"] = c["name"] + "()${1:}"
for c in symbol_table.get('properties', []):
modifiers = " ".join(c.get("modifiers", []))
property_type = c["type"].capitalize() if "type" in c and c["type"] else ""
- completions[modifiers+" "+c["name"]+"\t"+property_type] = c["name"]
+ completions[modifiers + " " + c["name"] + "\t" + property_type] = c["name"]
for c in symbol_table.get('methods', []):
params = []
modifiers = " ".join(c.get("modifiers", []))
if 'parameters' in c and type(c['parameters']) is list and len(c['parameters']) > 0:
for p in c['parameters']:
- params.append(p["type"]+" "+p["name"])
+ params.append(p["type"] + " " + p["name"])
if len(params) == 1:
- completions[modifiers+" "+c["name"]+"("+", ".join(params)+") \t"+c['returnType']] =\
+ completions[modifiers + " " + c["name"] + "(" + ", ".join(params) + ") \t" + c['returnType']] = \
"%s(${1:%s})" % (c["name"], ", ".join(params))
elif len(params) > 1:
paramStrings = []
for i, p in enumerate(params):
- paramStrings.append("${"+str(i+1)+":"+params[i]+"}")
+ paramStrings.append("${" + str(i + 1) + ":" + params[i] + "}")
paramString = ", ".join(paramStrings)
- completions[modifiers+" "+c["name"]+"("+", ".join(params)+") \t"+c['returnType']] =\
- c["name"]+"("+paramString+")"
+ completions[modifiers + " " + c["name"] + "(" + ", ".join(params) + ") \t" + c['returnType']] = \
+ c["name"] + "(" + paramString + ")"
else:
- completions[modifiers+" "+c["name"]+"("+", ".join(params)+") \t"+c['returnType']] =\
- c["name"]+"()${1:}"
+ completions[modifiers + " " + c["name"] + "(" + ", ".join(params) + ") \t" + c['returnType']] = \
+ c["name"] + "()${1:}"
for c in symbol_table.get("innerClasses", []):
tableDeclaration = c.get("tableDeclaration")
@@ -943,16 +948,16 @@ def parse_symbol_table(symbol_table):
params = []
if 'parameters' in con and type(con['parameters']) is list and len(con['parameters']) > 0:
for p in con['parameters']:
- params.append(p["type"].capitalize()+" "+p["name"])
+ params.append(p["type"].capitalize() + " " + p["name"])
paramStrings = []
for i, p in enumerate(params):
- paramStrings.append("${"+str(i+1)+":"+params[i]+"}")
+ paramStrings.append("${" + str(i + 1) + ":" + params[i] + "}")
paramString = ", ".join(paramStrings)
- completions[modifiers+" "+con["name"]+"("+", ".join(params)+")"] =\
- c["name"]+"("+paramString+")"
+ completions[modifiers + " " + con["name"] + "(" + ", ".join(params) + ")"] = \
+ c["name"] + "(" + paramString + ")"
else:
- completions[modifiers+" "+con["name"]+"()"] =\
- c["name"]+"()${1:}"
+ completions[modifiers + " " + con["name"] + "()"] = \
+ c["name"] + "()${1:}"
return completions
@@ -993,20 +998,20 @@ def add_config_history(operation, content, settings, ext="json"):
* history_content -- the content needed to keep
"""
outputdir = os.path.join(settings["workspace"], ".config")
- if not os.path.exists(outputdir):
+ if not os.path.exists(outputdir):
os.makedirs(outputdir)
with open(outputdir + "/%s.%s" % (operation, ext), "w") as fp:
fp.write(json.dumps(content, indent=4))
# After write the file to local, refresh sidebar
- sublime.set_timeout(lambda:sublime.active_window().run_command('refresh_folder_list'), 200);
- sublime.set_timeout(lambda:sublime.active_window().run_command('refresh_folder_list'), 1300);
+ sublime.set_timeout(lambda: sublime.active_window().run_command('refresh_folder_list'), 200);
+ sublime.set_timeout(lambda: sublime.active_window().run_command('refresh_folder_list'), 1300);
def export_report_api(rootdir):
reports = []
- for parent,dirnames,filenames in os.walk(rootdir):
+ for parent, dirnames, filenames in os.walk(rootdir):
for filename in filenames:
if not filename.endswith(".report"): continue
report_dir = parent + "/" + filename
@@ -1152,7 +1157,7 @@ def parse_package_types(_types):
# If no elements, don't keep it
if not elements:
continue
-
+
# inFolder is false
if attr["inFolder"] == "false":
package_types[_type] = elements
@@ -1162,7 +1167,7 @@ def parse_package_types(_types):
for folder in [e for e in elements if "/" not in e]:
folder_elements[folder] = [
e for e in elements if e.startswith(folder) \
- and "/" in e
+ and "/" in e
]
package_types[_type] = folder_elements
continue
@@ -1206,7 +1211,7 @@ def build_package_types(package_xml_content):
metadata_types = result["Package"]["types"]
# If there is only one types in package
- if isinstance(metadata_types, dict):
+ if isinstance(metadata_types, dict):
metadata_types = [metadata_types]
types = {}
@@ -1235,7 +1240,7 @@ def build_folder_types(dirs):
types = {}
for _dir in dirs:
base, folder = os.path.split(_dir)
-
+
if folder not in settings: continue
if dname not in _dir: continue
@@ -1268,11 +1273,11 @@ def build_package_dict(files, ignore_folder=True):
package_dict = {}
for f in files:
# Ignore folder
- if ignore_folder and not os.path.isfile(f):
+ if ignore_folder and not os.path.isfile(f):
continue
# Replace meta file with source file
- if f.endswith("-meta.xml"):
+ if f.endswith("-meta.xml"):
f = f.replace("-meta.xml", "")
# If ignore_folder is true and f is folder
@@ -1344,10 +1349,11 @@ def build_package_xml(settings, package_dict):
return package_xml_content
+
def build_destructive_package_by_files(files, ignore_folder=True):
settings = context.get_settings()
workspace = settings["workspace"]
- if not os.path.exists(workspace):
+ if not os.path.exists(workspace):
os.makedirs(workspace)
# Constucture package dict
@@ -1355,13 +1361,13 @@ def build_destructive_package_by_files(files, ignore_folder=True):
# Build destructiveChanges.xml
destructive_xml_content = build_package_xml(settings, package_dict)
- destructive_xml_path = workspace+"/destructiveChanges.xml"
+ destructive_xml_path = workspace + "/destructiveChanges.xml"
with open(destructive_xml_path, "wb") as fp:
fp.write(destructive_xml_content.encode("utf-8"))
# Build package.xml
package_xml_content = build_package_xml(settings, {})
- package_xml_path = workspace+"/package.xml"
+ package_xml_path = workspace + "/package.xml"
with open(package_xml_path, "wb") as fp:
fp.write(package_xml_content.encode("utf-8"))
@@ -1387,6 +1393,7 @@ def build_destructive_package_by_files(files, ignore_folder=True):
return base64_package
+
def build_destructive_package_by_package_xml(types):
""" Build destructive package,
@@ -1408,13 +1415,13 @@ def build_destructive_package_by_package_xml(types):
# Build destructiveChanges.xml
destructive_xml_content = build_package_xml(settings, types)
- destructive_xml_path = workspace+"/destructiveChanges.xml"
+ destructive_xml_path = workspace + "/destructiveChanges.xml"
with open(destructive_xml_path, "wb") as fp:
fp.write(destructive_xml_content.encode("utf-8"))
# Build package.xml
package_xml_content = build_package_xml(settings, {})
- package_xml_path = workspace+"/package.xml"
+ package_xml_path = workspace + "/package.xml"
with open(package_xml_path, "wb") as fp:
fp.write(package_xml_content.encode("utf-8"))
@@ -1460,9 +1467,9 @@ def build_deploy_package(files):
# Define write_to
write_to = (
- metadata_folder,
- ("/" + f["folder"]) if f["folder"] else "",
- f["name"],
+ metadata_folder,
+ ("/" + f["folder"]) if f["folder"] else "",
+ f["name"],
f["extension"]
)
@@ -1488,7 +1495,7 @@ def build_deploy_package(files):
package_xml_content = format_xml(package_xml_content)
if settings["debug_mode"]:
- print ("{seprate}\n[Package.xml for Deployment]: \n{seprate}\n{content}\n{seprate}".format(
+ print("{seprate}\n[Package.xml for Deployment]: \n{seprate}\n{content}\n{seprate}".format(
seprate="~" * 100,
content=package_xml_content.decode("UTF-8")
))
@@ -1497,13 +1504,13 @@ def build_deploy_package(files):
try:
time_stamp = time.strftime("%Y%m%d%H%M", time.localtime(time.time()))
xml_dir = os.path.join(settings["workspace"], ".deploy")
- if not os.path.exists(xml_dir):
+ if not os.path.exists(xml_dir):
os.mkdir(xml_dir)
-
+
# http://stackoverflow.com/questions/1627198/python-mkdir-giving-me-wrong-permissions
if not os.access(xml_dir, os.W_OK):
os.chmod(xml_dir, 0o755)
-
+
xml_dir = os.path.join(xml_dir, "package-%s.xml" % time_stamp)
with open(xml_dir, "wb") as fp:
fp.write(package_xml_content)
@@ -1536,12 +1543,12 @@ def compress_resource_folder(resource_folder):
static_resource_path, resource_name = os.path.split(resource_folder)
# Create StaticResource File
- static_resource_file = os.path.join(static_resource_path, resource_name+".resource")
+ static_resource_file = os.path.join(static_resource_path, resource_name + ".resource")
zf = zipfile.ZipFile(static_resource_file, "w", zipfile.ZIP_DEFLATED)
for dirpath, dirnames, filenames in os.walk(resource_folder):
- basename = dirpath[len(resource_folder)+1:]
+ basename = dirpath[len(resource_folder) + 1:]
for filename in filenames:
- zf.write(os.path.join(dirpath, filename), basename+"/"+filename)
+ zf.write(os.path.join(dirpath, filename), basename + "/" + filename)
zf.close()
# Build package
@@ -1574,7 +1581,7 @@ def build_lightning_package(files_or_dirs, meta_type=""):
for dirpath, dirnames, filenames in os.walk(_file_or_dir):
base, aura_name = os.path.split(dirpath)
if not filenames:
- zf.write(dirpath, meta_folder+"/"+aura_name)
+ zf.write(dirpath, meta_folder + "/" + aura_name)
else:
for filename in filenames:
zf.write(os.path.join(dirpath, filename), "%s/%s/%s" % (
@@ -1599,7 +1606,7 @@ def build_lightning_package(files_or_dirs, meta_type=""):
meta_type,
settings["api_version"]
)
- package_xml_path = settings["workspace"]+"/package.xml"
+ package_xml_path = settings["workspace"] + "/package.xml"
open(package_xml_path, "wb").write(package_xml_content.encode("utf-8"))
zf.write(package_xml_path, "package.xml")
os.remove(package_xml_path)
@@ -1625,12 +1632,12 @@ def base64_encode(zipfile):
def compress_package(package_dir):
- zipfile_path = package_dir+"/archive.zip"
+ zipfile_path = package_dir + "/archive.zip"
zf = zipfile.ZipFile(zipfile_path, "w", zipfile.ZIP_DEFLATED)
for dirpath, dirnames, filenames in os.walk(package_dir):
- basename = dirpath[len(package_dir)+1:]
+ basename = dirpath[len(package_dir) + 1:]
for filename in filenames:
- zf.write(os.path.join(dirpath, filename), basename+"/"+filename)
+ zf.write(os.path.join(dirpath, filename), basename + "/" + filename)
zf.close()
base64_package = base64_encode(zipfile_path)
@@ -1662,6 +1669,7 @@ def extract_encoded_zipfile(encoded_zip_file, extract_to, ignore_package_xml=Fal
# we need to refresh the sublime workspace to show it
sublime.active_window().run_command("refresh_folder_list")
+
def extract_zipfile(zipfile_path, extract_to):
""" Extract Zip File to current folder
"""
@@ -1672,7 +1680,7 @@ def extract_zipfile(zipfile_path, extract_to):
raise BaseException(str(ex))
return
- if not os.path.exists(extract_to):
+ if not os.path.exists(extract_to):
os.makedirs(extract_to)
for filename in zfile.namelist():
@@ -1698,7 +1706,7 @@ def extract_file(zipfile_path, extract_to, ignore_package_xml=False):
"""
zip_file = zipfile.ZipFile(zipfile_path, 'r')
for filename in zip_file.namelist():
- if filename.endswith('/'):
+ if filename.endswith('/'):
continue
if ignore_package_xml and filename == "unpackaged/package.xml":
@@ -1717,6 +1725,7 @@ def extract_file(zipfile_path, extract_to, ignore_package_xml=False):
zip_file.close()
+
def extract_zip(base64String, extract_to):
"""
1. Decode base64String to zip
@@ -1737,6 +1746,7 @@ def extract_zip(base64String, extract_to):
return zipfile_path
+
def parse_package(package_content):
"""Parse package types to specified format
@@ -1782,24 +1792,25 @@ def parse_package(package_content):
members.append("%s" % t["members"])
elements.append("%s%s" % (
- "".join(members),
+ "".join(members),
"%s" % t["name"]
))
return "".join(elements) + "%s" % result["Package"]["version"]
+
def reload_file_attributes(file_properties, settings=None, append=False):
- """ Keep the file attribute to local cache
+ """ Keep the file attributes in the local cache for saving to the server directly later
Paramter:
- * file_properties -- file attributes returned from server
+ * file_properties -- file attributes returned from server via Metadata API
* settings -- whole plugin settings
* append -- default is False, if append is false, it means local cache
of default project are reloaded by file properties, otherwise,
file properties will be appended to local cache
"""
# Get settings
- if not settings:
+ if not settings:
settings = context.get_settings()
metadata_body_or_markup = {
@@ -1813,7 +1824,7 @@ def reload_file_attributes(file_properties, settings=None, append=False):
# print(file_properties)
# If the package only contains `package.xml`
- if isinstance(file_properties, dict):
+ if isinstance(file_properties, dict):
file_properties = [file_properties]
component_settings = sublime.load_settings(context.COMPONENT_METADATA_SETTINGS)
@@ -1831,11 +1842,11 @@ def reload_file_attributes(file_properties, settings=None, append=False):
if metdata_object in all_components_attr:
components_attr = all_components_attr[metdata_object]
- base_name = filep['fileName'][filep['fileName'].rfind("/")+1:]
+ base_name = filep['fileName'][filep['fileName'].rfind("/") + 1:]
last_point = base_name.rfind(".")
name = base_name[:last_point]
- extension = ".%s" % base_name[last_point+1:]
-
+ extension = ".%s" % base_name[last_point + 1:]
+
attrs = {
"namespacePrefix": filep.get("namespacePrefix", None),
"name": name,
@@ -1867,7 +1878,8 @@ def reload_file_attributes(file_properties, settings=None, append=False):
sublime.save_settings(context.COMPONENT_METADATA_SETTINGS)
# Reload component metadata cache in globals()
- sublime.set_timeout(lambda:load_metadata_cache(True, settings["username"]), 5)
+ sublime.set_timeout(lambda: load_metadata_cache(True, settings["username"]), 5)
+
def format_debug_logs(settings, records):
if len(records) == 0: return "No available logs."
@@ -1914,22 +1926,23 @@ def format_debug_logs(settings, records):
# Headers
headers = ""
for header in debug_log_headers:
- headers += "%-*s" % (debug_log_headers_properties[header]["width"],
- debug_log_headers_properties[header]["label"])
+ headers += "%-*s" % (debug_log_headers_properties[header]["width"],
+ debug_log_headers_properties[header]["label"])
# Content
content = ""
- records = sorted(records, key=lambda k : k['StartTime'])
+ records = sorted(records, key=lambda k: k['StartTime'])
for record in records:
for header in debug_log_headers:
if header == "StartTime":
content += "%-*s" % (debug_log_headers_properties[header]["width"],
- local_datetime(record[header]))
+ local_datetime(record[header]))
continue
content += "%-*s" % (debug_log_headers_properties[header]["width"], record[header])
content += "\n"
- return "\n" + headers + "\n" + (len(headers) * "-") + "\n" + content[:len(content)-1]
+ return "\n" + headers + "\n" + (len(headers) * "-") + "\n" + content[:len(content) - 1]
+
def format_error_message(result):
"""Format message as below format
@@ -1946,17 +1959,21 @@ def format_error_message(result):
error_message = ""
for key, value in result.items():
- if isinstance(value, list):
- if value: value = value[0]
- else: continue
- elif not value: continue
-
+ if isinstance(value, list):
+ if value:
+ value = value[0]
+ else:
+ continue
+ elif not value:
+ continue
+
error_message += "% 30s\t" % "{0}: ".format(key)
- value = urllib.parse.unquote(unescape(none_value(value),
- {"'": "'", """: '"'}))
+ value = urllib.parse.unquote(unescape(none_value(value),
+ {"'": "'", """: '"'}))
error_message += "%-30s\t" % value + "\n"
- return error_message[:len(error_message)-1]
+ return error_message[:len(error_message) - 1]
+
def format_waiting_message(result, header=""):
error_message = header + "\n" + "-" * 100 + "\n"
@@ -1978,6 +1995,7 @@ def format_waiting_message(result, header=""):
return error_message
+
def format_xml(xml_string, indent="4"):
"""Return formatted XML string
@@ -1999,6 +2017,7 @@ def format_xml(xml_string, indent="4"):
return content
+
def none_value(value):
""" If value is None, return "", if not, return string format of value
@@ -2009,7 +2028,8 @@ def none_value(value):
if not value: return ""
return "%s" % value
-
+
+
def is_python3x():
"""
If python version is 3.x, return True
@@ -2017,9 +2037,12 @@ def is_python3x():
return sys.version > '3'
+
"""
Below three functions are used to parse completions out of box.
"""
+
+
def parse_namespace(publicDeclarations):
"""
from . import util
@@ -2037,6 +2060,7 @@ def parse_namespace(publicDeclarations):
return namespaces_dict
+
def parse_method(methods, is_method=True):
if not methods: return {}
@@ -2058,11 +2082,12 @@ def parse_method(methods, is_method=True):
for i in range(len(display_parameters)):
return_parameters.append("${%s:%s}" % (i + 1, display_parameters[i]))
- methods_dict["%s(%s)\t%s" % (method["name"], ', '.join(display_parameters), returnType)] =\
+ methods_dict["%s(%s)\t%s" % (method["name"], ', '.join(display_parameters), returnType)] = \
"%s(%s)$0" % (method["name"], ', '.join(return_parameters))
return methods_dict
+
def parse_properties(properties):
if not properties: return {}
properties_dict = {}
@@ -2071,6 +2096,7 @@ def parse_properties(properties):
return properties_dict
+
def parse_all(apex):
"""
Usage:
@@ -2108,10 +2134,10 @@ def parse_all(apex):
if class_name.lower() in apex_completions:
apex_completions[class_name.lower()] = [apex_completions[class_name.lower()]]
apex_completions[class_name.lower()].append({
- "constructors" : constructors_dict,
- "methods" : methods_dict,
- "properties" : properties_dict,
- "namespace" : namespace,
+ "constructors": constructors_dict,
+ "methods": methods_dict,
+ "properties": properties_dict,
+ "namespace": namespace,
"name": class_name
})
else:
@@ -2121,7 +2147,7 @@ def parse_all(apex):
apex_completions[class_name.lower()]["properties"] = properties_dict
apex_completions[class_name.lower()]["namespace"] = namespace
apex_completions[class_name.lower()]["name"] = class_name
-
+
return apex_completions
@@ -2135,9 +2161,9 @@ def parse_code_coverage(result):
"Coverage": _record.get("Coverage")
}
- code_coverage_desc =("Trigger Or Class Code Coverage:\n" +
- "Select Apex trigger or class name and " +
- "view code coverage by context menu\n")
+ code_coverage_desc = ("Trigger Or Class Code Coverage:\n" +
+ "Select Apex trigger or class name and " +
+ "view code coverage by context menu\n")
# Keep the coverage to local cache, will overwrite the old one
settings = context.get_settings()
@@ -2167,8 +2193,9 @@ def parse_code_coverage(result):
row += "%-*s" % (header_width["Lines"], "%s/%s" % (covered_lines, total_lines))
code_coverage += row + "\n"
- return message.SEPRATE.format(code_coverage_desc + "-"*79 + "\n" +
- columns + "\n"*2 + code_coverage)
+ return message.SEPRATE.format(code_coverage_desc + "-" * 79 + "\n" +
+ columns + "\n" * 2 + code_coverage)
+
def parse_sync_test_coverage(result):
successes = result["successes"]
@@ -2178,7 +2205,7 @@ def parse_sync_test_coverage(result):
allrows = []
if result["failures"]:
allrows.append("Failed Test Methods:")
- for failure in sorted(result["failures"], key=lambda k : k["name"]):
+ for failure in sorted(result["failures"], key=lambda k: k["name"]):
allrows.append("~" * 80)
failure_row = []
failure_row.append("% 30s %-30s " % ("ClassName: ", failure["name"]))
@@ -2193,7 +2220,7 @@ def parse_sync_test_coverage(result):
if result["successes"]:
allrows.append("~" * 80)
allrows.append("Successful Test Methods:")
- for success in sorted(result["successes"], key=lambda k : k["name"]):
+ for success in sorted(result["successes"], key=lambda k: k["name"]):
allrows.append("~" * 80)
success_row = []
success_row.append("% 30s %-30s " % ("ClassName: ", success["name"]))
@@ -2220,7 +2247,8 @@ def parse_sync_test_coverage(result):
coverageRows.append("".join(columns))
coverageRows.append("~" * 80)
codeCoverage = sorted(result["codeCoverage"], reverse=True,
- key=lambda k : 0 if k["numLocations"] == 0 else (k["numLocations"] - k['numLocationsNotCovered']) / k["numLocations"])
+ key=lambda k: 0 if k["numLocations"] == 0 else (k["numLocations"] - k[
+ 'numLocationsNotCovered']) / k["numLocations"])
for coverage in codeCoverage:
coverageRow = []
coverageRow.append("%-*s" % (header_width["Type"], coverage["type"]))
@@ -2232,12 +2260,12 @@ def parse_sync_test_coverage(result):
numLocationsCovered = numLocations - numLocationsNotCovered
percent = numLocationsCovered / numLocations * 100 if numLocations != 0 else 0
coverageRow.append("%-*s" % (
- header_width["Percent"],
+ header_width["Percent"],
"%.2f%%" % percent
))
coverageRow.append("%-*s" % (
header_width["Lines"], "%s/%s" % (
- numLocationsCovered,
+ numLocationsCovered,
numLocations
)
))
@@ -2248,6 +2276,7 @@ def parse_sync_test_coverage(result):
return "\n".join(allrows)
+
def parse_test_result(test_result):
"""
format test result as specified format
@@ -2279,8 +2308,8 @@ def parse_test_result(test_result):
return_result = class_name + test_result_desc + test_result_content[:-1]
# Parse Debug Log Part
- info = "Select LogId and view log detail " +\
- "in Sublime or Salesforce by context menu"
+ info = "Select LogId and view log detail " + \
+ "in Sublime or Salesforce by context menu"
debug_log_content = "LogId: "
if len(test_result) > 0 and test_result[0]["ApexLogId"] != None:
debug_log_content += test_result[0]["ApexLogId"]
@@ -2290,6 +2319,7 @@ def parse_test_result(test_result):
return return_result
+
def parse_validation_rule(settings, sobjects):
""" Parse the validation rule in Sobject.object to csv
@@ -2306,8 +2336,8 @@ def parse_validation_rule(settings, sobjects):
# Initiate CSV Writer and Write headers
columns = settings["validation_rule_columns"]
with open(outputdir + "/ValidationRules.csv", "wb") as fp:
- fp.write(u'\ufeff'.encode('utf8')) # Write BOM Header
- fp.write(",".join(columns).encode("utf-8") + b"\n") # Write Header
+ fp.write(u'\ufeff'.encode('utf8')) # Write BOM Header
+ fp.write(",".join(columns).encode("utf-8") + b"\n") # Write Header
# Open workflow source file
validation_rule_path = settings["workspace"] + "/src/objects"
@@ -2330,6 +2360,7 @@ def parse_validation_rule(settings, sobjects):
# If one sobject doesn't have vr, We don't need do anything
pass
+
def parse_workflow_metadata(settings, sobjects):
"""Parse Sobject.workflow to csv, including rule, field update and alerts
@@ -2377,8 +2408,8 @@ def parse_workflow_metadata(settings, sobjects):
# Write Header
with open(rule_outputdir, "wb") as fp:
- fp.write(u'\ufeff'.encode('utf8')) # Write BOM Header
- fp.write(",".join([(c[0].upper() + c[1:]) for c in columns]).encode("utf-8") + b"\n") # Write Header
+ fp.write(u'\ufeff'.encode('utf8')) # Write BOM Header
+ fp.write(",".join([(c[0].upper() + c[1:]) for c in columns]).encode("utf-8") + b"\n") # Write Header
# Append Body
rule_path = settings["workspace"] + "/src/workflows"
@@ -2397,6 +2428,7 @@ def parse_workflow_metadata(settings, sobjects):
# If one sobject doesn't have vr, We don't need do anything
pass
+
def write_metadata_to_csv(fp, columns, metadata, sobject):
""" This method is invoked by function in this module
@@ -2447,7 +2479,7 @@ def write_metadata_to_csv(fp, columns, metadata, sobject):
else:
value = " ".join(cell_value) + "\n"
- cell_value = value[ : -1]
+ cell_value = value[: -1]
else:
cell_value = ""
elif not cell_value:
@@ -2456,8 +2488,8 @@ def write_metadata_to_csv(fp, columns, metadata, sobject):
cell_value = "%s" % cell_value
# Unescape special code to normal
- cell_value = urllib.parse.unquote(unescape(cell_value,
- {"'": "'", """: '"'}))
+ cell_value = urllib.parse.unquote(unescape(cell_value,
+ {"'": "'", """: '"'}))
# Append cell_value to list in order to write list to csv
if '"' in cell_value:
@@ -2470,9 +2502,10 @@ def write_metadata_to_csv(fp, columns, metadata, sobject):
row_value_bin = ",".join(row_value)
row_values += row_value_bin.encode("utf-8") + b"\n"
- fp.write(row_values) # Write Body
+ fp.write(row_values) # Write Body
fp.close()
+
def list2csv(file_path, records, NOT_INCLUDED_COLUMNS=["urls", "attributes"]):
"""convert simple dict in list to csv
@@ -2496,6 +2529,7 @@ def list2csv(file_path, records, NOT_INCLUDED_COLUMNS=["urls", "attributes"]):
values.append(('"%s"' % none_value(record[strk])).encode("utf-8"))
fp.write(b",".join(values) + b"\n")
+
def json2csv(_list, NOT_INCLUDED_COLUMNS=["urls", "attributes"]):
"""convert simple dict in list to csv
@@ -2520,6 +2554,7 @@ def json2csv(_list, NOT_INCLUDED_COLUMNS=["urls", "attributes"]):
return csv_content
+
def parse_data_template_vertical(output_file_dir, result):
"""Parse the data template to csv by page layout
@@ -2569,7 +2604,7 @@ def parse_data_template_vertical(output_file_dir, result):
for picklist in details["picklistValues"]:
picklist_labels.append(picklist["label"])
picklist_values.append(picklist["value"])
-
+
fields_picklist_labels.append('"%s"' % "\n".join(picklist_labels))
fields_picklist_values.append('"%s"' % "\n".join(picklist_values))
@@ -2584,6 +2619,7 @@ def parse_data_template_vertical(output_file_dir, result):
fp.write(",".join(fields_picklist_labels).encode("utf-8") + b"\n")
fp.write(",".join(fields_picklist_values).encode("utf-8") + b"\n")
+
def parse_data_template_horizontal(output_file_dir, result):
"""Parse the data template to csv by page layout
@@ -2623,7 +2659,7 @@ def parse_data_template_horizontal(output_file_dir, result):
for picklist in details["picklistValues"]:
picklist_labels.append(picklist["label"])
picklist_values.append(picklist["value"])
-
+
row = []
row.append(details["label"])
row.append(details["name"])
@@ -2639,6 +2675,7 @@ def parse_data_template_horizontal(output_file_dir, result):
fp.write(u'\ufeff'.encode('utf8'))
fp.write("\n".join(rows).encode("utf-8"))
+
def get_soql_fields(soql):
""" Get the field list of soql
@@ -2653,10 +2690,10 @@ def get_soql_fields(soql):
return []
fieldstr = match.group(0).strip()[6:-4].replace("\n", "").replace("\t", "")
- print (fieldstr.split(','))
+ print(fieldstr.split(','))
fields = []
- expr_fields = [] # Aggregate Fields
+ expr_fields = [] # Aggregate Fields
for f in fieldstr.split(','):
f = f.strip()
if " " in f:
@@ -2670,15 +2707,16 @@ def get_soql_fields(soql):
for idx in range(0, len(expr_fields)):
fields.append('expr%s' % idx)
- print (fields)
+ print(fields)
return fields
+
def query_to_csv(result, soql):
records = result["records"]
if not records:
return b"No matched rows"
-
+
# Get CSV headers,
# If we use * to fetch all fields
if re.compile("select\\s+\\*\\s+from[\\s\\t]+\\w+", re.I).match(soql):
@@ -2710,7 +2748,8 @@ def query_to_csv(result, soql):
rows += ",".join(row).encode("utf-8") + b"\n"
return rows
-
+
+
def parse_execute_anonymous_xml(result):
"""Return the formatted anonymous execute result
@@ -2730,14 +2769,15 @@ def parse_execute_anonymous_xml(result):
line = result["line"]
column = result["column"]
compileProblem = result["compileProblem"]
- view_result = compileProblem + " at line " + line +\
- " column " + column
+ view_result = compileProblem + " at line " + line + \
+ " column " + column
- view_result = urllib.parse.unquote(unescape(view_result,
- {"'": "'", """: '"'}))
+ view_result = urllib.parse.unquote(unescape(view_result,
+ {"'": "'", """: '"'}))
return view_result
+
def generate_workbook(result, workspace, workbook_field_describe_columns):
""" generate workbook for sobject according to user customized columns
you can change the workbook_field_describe_columns in default settings
@@ -2763,21 +2803,21 @@ def generate_workbook(result, workspace, workbook_field_describe_columns):
# Create new csv file for this workbook
# fp = open(outputdir + "/" + sobject + ".csv", "wb", newline='')
workbook_dir = outputdir + "/" + sobject + ".csv"
-
- #------------------------------------------------------------
+
+ # ------------------------------------------------------------
# Headers, all headers are capitalized
- #------------------------------------------------------------
+ # ------------------------------------------------------------
headers = [column.capitalize() for column in fields_key]
# Write Header
fp = open(workbook_dir, "wb")
- fp.write(u'\ufeff'.encode('utf8')) # Write BOM Header
- fp.write(",".join(headers).encode("utf-8") + b"\n") # Write Header
+ fp.write(u'\ufeff'.encode('utf8')) # Write BOM Header
+ fp.write(",".join(headers).encode("utf-8") + b"\n") # Write Header
- #------------------------------------------------------------
+ # ------------------------------------------------------------
# Fields Part (All rows are sorted by field label)
- #------------------------------------------------------------
- fields = sorted(fields, key=lambda k : k['label'])
+ # ------------------------------------------------------------
+ fields = sorted(fields, key=lambda k: k['label'])
for field in fields:
row_value_literal = b""
row_values = []
@@ -2789,7 +2829,7 @@ def generate_workbook(result, workspace, workbook_field_describe_columns):
for key in fields_key:
# Get field value by field API(key)
row_value = field.get(key)
-
+
if isinstance(row_value, list):
if key == "picklistValues":
value = ''
@@ -2810,8 +2850,8 @@ def generate_workbook(result, workspace, workbook_field_describe_columns):
row_value = field_type if key == "type" else "%s" % row_value
# Unescape special code to normal
- row_value = urllib.parse.unquote(unescape(row_value,
- {"'": "'", """: '"'}))
+ row_value = urllib.parse.unquote(unescape(row_value,
+ {"'": "'", """: '"'}))
# Append row_value to list in order to write list to csv
if '"' in row_value:
@@ -2828,16 +2868,17 @@ def generate_workbook(result, workspace, workbook_field_describe_columns):
fp.close()
# Display Success Message
- sublime.set_timeout(lambda:sublime.status_message(sobject + " workbook is generated"), 10)
+ sublime.set_timeout(lambda: sublime.status_message(sobject + " workbook is generated"), 10)
# Return outputdir
return outputdir
+
record_keys = ["label", "name", "type", "length"]
record_key_width = {
- "label": 40,
- "name": 40,
- "type": 20,
+ "label": 40,
+ "name": 40,
+ "type": 20,
"length": 7
}
recordtype_key_width = {
@@ -2854,6 +2895,8 @@ def generate_workbook(result, workspace, workbook_field_describe_columns):
}
seprate = 100 * "-" + "\n"
+
+
def parse_sobject_field_result(result):
"""According to sobject describe result, display record type information,
child sobjects information and the field information.
@@ -2870,9 +2913,9 @@ def parse_sobject_field_result(result):
# View Name or Header
view_result = sobject + " Describe:\n"
- #------------------------------------------------
+ # ------------------------------------------------
# Fields Part
- #------------------------------------------------
+ # ------------------------------------------------
# Output totalSize Part
fields = result.get("fields")
view_result += seprate
@@ -2889,15 +2932,15 @@ def parse_sobject_field_result(result):
view_result += len(columns) * "-" + "\n"
# Sort fields list by lable of every field
- fields = sorted(fields, key=lambda k : k['label'])
+ fields = sorted(fields, key=lambda k: k['label'])
# Output field values
for record in fields:
row = ""
for key in record_keys:
row_value = "Formula(%s)" % record.get(key) if key == "type" \
- and record["calculatedFormula"] else record.get(key)
-
+ and record["calculatedFormula"] else record.get(key)
+
if not row_value:
row_value = ""
@@ -2908,9 +2951,9 @@ def parse_sobject_field_result(result):
view_result += row + "\n"
view_result += "\n"
- #------------------------------------------------
+ # ------------------------------------------------
# Record Type Part
- #------------------------------------------------
+ # ------------------------------------------------
recordtypes = result.get("recordTypeInfos")
view_result += seprate
view_result += "Record Type Info: \t" + str(len(recordtypes)) + "\n"
@@ -2935,7 +2978,7 @@ def parse_sobject_field_result(result):
row = ""
for key in recordtype_keys:
if key not in recordtype_key_width: continue
-
+
# Get field value by field API
# and convert it to str
row_value = recordtype.get(key)
@@ -2945,14 +2988,14 @@ def parse_sobject_field_result(result):
key_width = recordtype_key_width[key]
row_value = "%-*s" % (key_width, row_value)
row += row_value
-
+
view_result += row + "\n"
view_result += "\n"
- #------------------------------------------------
+ # ------------------------------------------------
# Child Relationship
- #------------------------------------------------
+ # ------------------------------------------------
childRelationships = result.get("childRelationships")
view_result += seprate
view_result += "ChildRelationships Info: \t" + str(len(childRelationships)) + "\n"
@@ -2978,13 +3021,14 @@ def parse_sobject_field_result(result):
row_value = "%-*s" % (30, row_value)
row += row_value
-
+
view_result += row + "\n"
view_result += "\n"
return view_result
+
def getUniqueElementValueFromXmlString(xmlString, elementName):
"""
Extracts an element value from an XML string.
@@ -2997,17 +3041,18 @@ def getUniqueElementValueFromXmlString(xmlString, elementName):
elementsByName = xmlStringAsDom.getElementsByTagName(elementName)
elementValue = None
if len(elementsByName) > 0:
- elementValue = elementsByName[0].toxml().replace('<' +\
- elementName + '>','').replace('' + elementName + '>','')
+ elementValue = elementsByName[0].toxml().replace('<' + \
+ elementName + '>', '').replace('' + elementName + '>', '')
else:
elementValue = xmlString.decode("utf-8")
return unescape(elementValue, {"'": "'", """: '"'})
+
def get_response_error(response):
# Debug Message
settings = context.get_settings()
if settings["debug_mode"]:
- print (response.content)
+ print(response.content)
content = response.content
result = {"success": False}
@@ -3020,6 +3065,7 @@ def get_response_error(response):
result["Error Message"] = response.content
return result
+
def get_path_attr(path_or_file):
"""Return project name and component folder attribute
@@ -3046,18 +3092,29 @@ def get_path_attr(path_or_file):
return project_name, metadata_folder
+
def get_file_attributes(file_name):
+ """
+ get file attribute from the file_name (file path)
+ @param file_name: file path, usually from view.file_name()
+ @return: dict with following attributes:
+ name(file name without extension),
+ extension,
+ full name (name with extension),
+ metadata folder(metadata type folder, like aura/class/lwc/pages/triggers),
+        folder (Lightning bundle folder, usually the Lightning Aura/Web Component name)
+ """
attributes = {}
base, fullName = os.path.split(file_name)
if "." in fullName:
name = fullName[:fullName.rfind(".")]
- extension = fullName[fullName.rfind(".")+1:]
+ extension = fullName[fullName.rfind(".") + 1:]
else:
name, extension = fullName, ""
attributes["fullName"] = fullName
attributes["name"] = name
attributes["extension"] = extension
-
+
base, folder = os.path.split(base)
base, metafolder_or_src = os.path.split(base)
@@ -3073,6 +3130,7 @@ def get_file_attributes(file_name):
return attributes
+
def get_metadata_folder(file_name):
""" Get the metadata_folder by file_name
@@ -3088,6 +3146,7 @@ def get_metadata_folder(file_name):
attributes = get_file_attributes(file_name)
return attributes["metadata_folder"]
+
def load_metadata_cache(reload_cache=False, username=None):
""" Reload component cache in globals()
"""
@@ -3099,6 +3158,7 @@ def load_metadata_cache(reload_cache=False, username=None):
return globals()["components"]
+
def get_component_attribute(file_name, switch=True, reload_cache=False):
"""
get the component name by file_name, and then get the component_url and component_id
@@ -3133,7 +3193,7 @@ def get_component_attribute(file_name, switch=True, reload_cache=False):
# Check whether project of current file is active project
default_project_name = settings["default_project_name"]
- if switch and default_project_name.lower() not in file_name.lower():
+ if switch and default_project_name.lower() not in file_name.lower():
return None, None
xml_name = settings[metadata_folder]["xmlName"]
@@ -3147,6 +3207,60 @@ def get_component_attribute(file_name, switch=True, reload_cache=False):
# Return tuple
return (component_attribute, name)
+
+def delete_component_attribute(dirs_or_files, switch=True):
+ """
+ Delete component metadata cache for given files or directory(Lightning bundle)
+    @param dirs_or_files: lightning directory(bundle) or files
+ @param switch:
+ @return:
+ """
+
+ def remove_component_cache(file_path):
+ file_attr = get_file_attributes(file_path)
+ metadata_folder = file_attr["metadata_folder"]
+ full_name = file_attr["fullName"]
+ folder = file_attr.get("folder", None)
+
+ xml_name = settings[metadata_folder]["xmlName"]
+ if xml_name in components_dict:
+ components_dict[xml_name].pop(full_name.lower(), None)
+ # Following code will be useful for future component attribute structure
+ if folder:
+ components_dict[xml_name].pop(folder.lower() + full_name.lower(), None)
+
+ settings = context.get_settings()
+ username = settings["username"]
+ s = sublime.load_settings(context.COMPONENT_METADATA_SETTINGS)
+ if not s.has(username):
+ return
+ components_dict = s.get(username, {})
+
+ # Check whether project of current file is active project
+ for _path in dirs_or_files:
+ default_project_name = settings["default_project_name"]
+ if switch and default_project_name.lower() not in _path.lower():
+ print('not all current project')
+ return
+
+ # loop through the files and delete component cache
+ for _path in dirs_or_files:
+ # delete the component metadata for the file
+ if os.path.isfile(_path):
+ remove_component_cache(_path)
+ else:
+ files = [f for f in os.listdir(_path) if os.path.isfile(os.path.join(_path, f))]
+ for _file in files:
+ remove_component_cache(os.path.join(_path, _file))
+
+ # Update component metadata
+ s.set(username, components_dict)
+ sublime.save_settings(context.COMPONENT_METADATA_SETTINGS)
+
+ # Reload component metadata cache in globals()
+ sublime.set_timeout(lambda: load_metadata_cache(True, settings["username"]), 5)
+
+
def check_enabled(file_name, check_cache=True):
"""
Check whether file is ApexTrigger, ApexComponent, ApexPage or ApexClass
@@ -3168,7 +3282,7 @@ def check_enabled(file_name, check_cache=True):
# Check whether current file is subscribed component
attributes = get_file_attributes(file_name)
metadata_folder = attributes["metadata_folder"]
- if metadata_folder not in settings["all_metadata_folders"]:
+ if metadata_folder not in settings["all_metadata_folders"]:
sublime.status_message("Not valid SFDC component")
return False
@@ -3181,24 +3295,26 @@ def check_enabled(file_name, check_cache=True):
# Check whether active component is in active project
if check_cache:
component_attribute, component_name = get_component_attribute(file_name)
- if not component_attribute:
+ if not component_attribute:
sublime.status_message("Not found the attribute of this component")
return False
-
+
return True
+
def display_active_project(view):
""" Display the default project name in the sidebar
"""
settings = context.get_settings()
- if not settings: return # Fix plugin loading issue
+ if not settings: return # Fix plugin loading issue
display_message = "Default Project => %s (v%s.0)" % (
settings["default_project_name"],
settings["api_version"]
)
view.set_status('default_project', display_message)
+
def switch_project(target):
""" Set the default project to the chosen one
"""
@@ -3236,10 +3352,11 @@ def switch_project(target):
# Reload cache for completions
from . import completions
- sublime.set_timeout(lambda:completions.load_sobject_cache(True), 50)
+ sublime.set_timeout(lambda: completions.load_sobject_cache(True), 50)
# Reload cache for component metadata
- sublime.set_timeout(lambda:load_metadata_cache(True), 50)
+ sublime.set_timeout(lambda: load_metadata_cache(True), 50)
+
def add_project_to_workspace(settings):
"""Add new project folder to workspace
@@ -3261,7 +3378,7 @@ def add_project_to_workspace(settings):
if not os.path.exists(workspace): os.makedirs(workspace)
project_file_path = os.path.join(workspace, "%s.sublime-project" % dpn)
with open(project_file_path, "wb") as fp:
- fp.write(json.dumps({"folders":[switch_to_folder]}, indent=4).encode("utf-8"))
+ fp.write(json.dumps({"folders": [switch_to_folder]}, indent=4).encode("utf-8"))
project_data = sublime.active_window().project_data()
if not project_data: project_data = {}
@@ -3273,9 +3390,9 @@ def add_project_to_workspace(settings):
folder_path = folder["path"]
# Parse windows path to AS-UNIX
- if "\\" in folder_path:
+ if "\\" in folder_path:
folder_path = folder_path.replace("\\", "/")
- if "\\" in workspace:
+ if "\\" in workspace:
workspace = workspace.replace("\\", "/")
if folder_path == workspace:
@@ -3339,6 +3456,7 @@ def get_completion_list(meta_type, meta_folder):
return completion_list
+
def get_metadata_elements(metadata_dir, suffix=None):
""" Get the name list by specified metadataObject
@@ -3364,11 +3482,12 @@ def get_metadata_elements(metadata_dir, suffix=None):
return elements
+
def export_role_hierarchy(records):
settings = context.get_settings()
- top_roles = [] # Role hierarchy
- rolemap = {} # Define roleId => role
+ top_roles = [] # Role hierarchy
+ rolemap = {} # Define roleId => role
for r in records:
# Build map
rolemap[r["Id"]] = r
@@ -3378,28 +3497,29 @@ def export_role_hierarchy(records):
# Start to write role name to csv
rows = []
- for role in sorted(top_roles, key=lambda k : k['Name']):
+ for role in sorted(top_roles, key=lambda k: k['Name']):
rows.append(role["Name"])
- append_child_roles(rolemap, role["Id"], rows, 1,
- settings["include_users_in_role_hierarchy"])
+ append_child_roles(rolemap, role["Id"], rows, 1,
+ settings["include_users_in_role_hierarchy"])
- outputdir = settings["workspace"]+ "/.export/Role"
+ outputdir = settings["workspace"] + "/.export/Role"
if not os.path.exists(outputdir):
os.makedirs(outputdir)
- outputfile = outputdir+"/hierarchy.csv"
+ outputfile = outputdir + "/hierarchy.csv"
with open(outputfile, "wb") as fp:
fp.write("\n".join(rows).encode("utf-8"))
return outputfile
+
def append_child_roles(rolemap, role_id, rows, level, include_users):
child_roles = []
for role in rolemap.values():
if role["ParentRoleId"] == role_id:
child_roles.append(role)
- for role in sorted(child_roles, key=lambda k : k['Name']):
+ for role in sorted(child_roles, key=lambda k: k['Name']):
row = level * "," + role["Name"]
# If include_users is true, Include active user list after role name
@@ -3422,6 +3542,7 @@ def append_child_roles(rolemap, role_id, rows, level, include_users):
append_child_roles(rolemap, role["Id"], rows, level + 1, include_users)
+
def export_profile_settings():
settings = context.get_settings()
@@ -3443,9 +3564,9 @@ def export_profile_settings():
# Escape profile name, for example,
# "Custom%3A Sales Profile" changed to "Custom: Sales Profile"
unquoted_profile = urllib.parse.unquote(unescape(profile, {"'": "'", """: '"'}))
- Printer.get("log").write("Parsing the profile security settings of "+unquoted_profile)
+ Printer.get("log").write("Parsing the profile security settings of " + unquoted_profile)
- profile_file = os.path.join(profile_dir, profile+".profile")
+ profile_file = os.path.join(profile_dir, profile + ".profile")
result = xmltodict.parse(open(profile_file, "rb").read())
result = result["Profile"]
@@ -3458,7 +3579,7 @@ def export_profile_settings():
object_permissions = result["objectPermissions"]
# Some profiles just only have one objectPermissions
- if isinstance(result["objectPermissions"], dict):
+ if isinstance(result["objectPermissions"], dict):
object_permissions = [object_permissions]
for op in object_permissions:
@@ -3525,23 +3646,23 @@ def export_profile_settings():
# Get the unescaped profiles
profiles = sorted(list(profile_settings.keys()))
-
+
#########################################
# 1. Export objectPermissions
#########################################
# Define object CRUD
cruds = [
- "allowRead", "allowCreate", "allowEdit",
- "allowDelete", "modifyAllRecords",
+ "allowRead", "allowCreate", "allowEdit",
+ "allowDelete", "modifyAllRecords",
"viewAllRecords"
]
crud_literal = {
- "allowCreate": "C",
- "allowRead": "R",
- "allowEdit": "U",
- "allowDelete": "D",
- "modifyAllRecords": "M",
+ "allowCreate": "C",
+ "allowRead": "R",
+ "allowEdit": "U",
+ "allowDelete": "D",
+ "modifyAllRecords": "M",
"viewAllRecords": "V"
}
@@ -3615,12 +3736,12 @@ def export_profile_settings():
all_rows.append(",".join(rows))
- outputdir = settings["workspace"]+ "/.export/profile"
+ outputdir = settings["workspace"] + "/.export/profile"
if not os.path.exists(outputdir):
os.makedirs(outputdir)
- Printer.get("log").write("Writing profile object security to "+outputdir)
- with open(outputdir+"/ObjectPermissions.csv", "wb") as fp:
+ Printer.get("log").write("Writing profile object security to " + outputdir)
+ with open(outputdir + "/ObjectPermissions.csv", "wb") as fp:
fp.write("\n".join(all_rows).encode("utf-8"))
#########################################
@@ -3646,8 +3767,8 @@ def export_profile_settings():
all_rows.append(",".join(rows))
- Printer.get("log").write("Writing profile tab visibility to "+outputdir)
- with open(outputdir+"/TabVisibilities.csv", "wb") as fp:
+ Printer.get("log").write("Writing profile tab visibility to " + outputdir)
+ with open(outputdir + "/TabVisibilities.csv", "wb") as fp:
fp.write("\n".join(all_rows).encode("utf-8"))
#########################################
@@ -3674,8 +3795,8 @@ def export_profile_settings():
all_rows.append(",".join(rows))
- Printer.get("log").write("Writing profile user permission to "+outputdir)
- with open(outputdir+"/UserPermissions.csv", "wb") as fp:
+ Printer.get("log").write("Writing profile user permission to " + outputdir)
+ with open(outputdir + "/UserPermissions.csv", "wb") as fp:
fp.write("\n".join(all_rows).encode("utf-8"))
#########################################
@@ -3716,14 +3837,15 @@ def export_profile_settings():
# Every field is separated line
all_rows.append(",".join(rows))
- outputdir = settings["workspace"]+ "/.export/profile"
+ outputdir = settings["workspace"] + "/.export/profile"
if not os.path.exists(outputdir):
os.makedirs(outputdir)
- Printer.get("log").write("Writing profile object security to "+outputdir)
- with open(outputdir+"/FieldLevelSecurity.csv", "wb") as fp:
+ Printer.get("log").write("Writing profile object security to " + outputdir)
+ with open(outputdir + "/FieldLevelSecurity.csv", "wb") as fp:
fp.write("\n".join(all_rows).encode("utf-8"))
+
def build_metadata(csvfile, options):
""" Convert JSON to custom labels metadata """
rjson = convert_csv_to_json(csvfile, options.get("xmlNodes"))
@@ -3736,11 +3858,12 @@ def build_metadata(csvfile, options):
return xmltodict.unparse(custom_labels_json)
+
def convert_csv_to_json(csvfile, xmlNodes):
""" Convert CSV to JSON format"""
- fp = open(csvfile, "rt", encoding="utf8"); # Open CSV file
- next(fp) # Ignore header
+ fp = open(csvfile, "rt", encoding="utf8"); # Open CSV file
+ next(fp) # Ignore header
csv_reader = csv.DictReader(fp, xmlNodes)
tempjson = os.path.join(os.path.split(csvfile)[0], "temp.json")