This is an automated email from the ASF dual-hosted git repository.
zhongjiajie pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/dolphinscheduler-sdk-python.git
The following commit(s) were added to refs/heads/main by this push:
new a7ce874 ci: compatible ruff latest version (#144)
a7ce874 is described below
commit a7ce874f3e11e6ef5f0e6dedf2a1caa89f01e806
Author: Jay Chung <[email protected]>
AuthorDate: Fri Apr 26 11:48:09 2024 +0800
ci: compatible ruff latest version (#144)
our ci fail with latest version of ruff in step `lint`
https://github.com/apache/dolphinscheduler-sdk-python/actions/runs/8841147449/job/24277780100
---
setup.py | 2 +-
src/pydolphinscheduler/core/yaml_workflow.py | 18 +++++++++---------
src/pydolphinscheduler/resources_plugin/github.py | 2 +-
src/pydolphinscheduler/resources_plugin/gitlab.py | 2 +-
tests/core/test_yaml_workflow.py | 4 ++--
5 files changed, 14 insertions(+), 14 deletions(-)
diff --git a/setup.py b/setup.py
index a56b672..7637a9c 100644
--- a/setup.py
+++ b/setup.py
@@ -85,7 +85,7 @@ class ApacheRelease(sdist):
f"shasum -a 512 {target_name} > {target_name}.sha512"
)
except DistutilsExecError as e:
- self.warn("Make dist and sign failed: %s" % e)
+ self.warn(f"Make dist and sign failed: {e}")
setup(
diff --git a/src/pydolphinscheduler/core/yaml_workflow.py b/src/pydolphinscheduler/core/yaml_workflow.py
index a445534..d7c57f1 100644
--- a/src/pydolphinscheduler/core/yaml_workflow.py
+++ b/src/pydolphinscheduler/core/yaml_workflow.py
@@ -63,8 +63,8 @@ class ParseTool:
"""Use $ENV{env_name} to load environment variable "env_name"."""
if "$ENV" in string_param:
key = re.findall(r"\$ENV\{(.*?)\}", string_param)[0]
- env_value = os.environ.get(key, "$%s" % key)
- string_param = string_param.replace("$ENV{%s}" % key, env_value)
+ env_value = os.environ.get(key, f"${key}")
+ string_param = string_param.replace(f"$ENV{{{key}}}", env_value)
return string_param
@staticmethod
@@ -119,7 +119,7 @@ def get_task_cls(task_type) -> Task:
all_task_types = {type_.capitalize(): type_ for type_ in tasks.__all__}
task_type_cap = task_type.capitalize()
if task_type_cap not in all_task_types:
- raise PyDSTaskNoFoundException("cant not find task %s" % task_type)
+ raise PyDSTaskNoFoundException(f"cant not find task {task_type}")
standard_name = all_task_types[task_type_cap]
return getattr(tasks, standard_name)
@@ -248,7 +248,7 @@ class YamlWorkflow(YamlParser):
workflow_path, self._base_folder
)
workflow_name = YamlWorkflow.parse(possible_path)
- content = content.replace('$WORKFLOW{"%s"}' % workflow_path, workflow_name)
+ content = content.replace(f'$WORKFLOW{{"{workflow_path}"}}', workflow_name)
return content
@@ -313,7 +313,7 @@ class YamlWorkflow(YamlParser):
condition_datas = task_params["condition"]
conditions = []
for condition_data in condition_datas:
- assert "task" in condition_data, "task must be in %s" %
condition_data
+ assert "task" in condition_data, f"task must be in
{condition_data}"
task_name = condition_data["task"]
condition_string = condition_data.get("condition", None)
@@ -372,7 +372,7 @@ class YamlWorkflow(YamlParser):
elif op.lower() == "or":
cls = Or
else:
- raise Exception("OP must be in And or Or, but get: %s" % op)
+ raise Exception(f"OP must be in And or Or, but get: {op}")
return cls
second_cond_ops = []
@@ -380,8 +380,8 @@ class YamlWorkflow(YamlParser):
second_op = first_group["op"]
task_ops = []
for condition_data in first_group["groups"]:
- assert "task" in condition_data, "task must be in %s" %
condition_data
- assert "flag" in condition_data, "flag must be in %s" %
condition_data
+ assert "task" in condition_data, f"task must be in
{condition_data}"
+ assert "flag" in condition_data, f"flag must be in
{condition_data}"
task_name = condition_data["task"]
flag = condition_data["flag"]
task = name2task[task_name]
@@ -448,7 +448,7 @@ class YamlWorkflow(YamlParser):
elif op.lower() == "or":
cls = Or
else:
- raise Exception("OP must be in And or Or, but get: %s" % op)
+ raise Exception(f"OP must be in And or Or, but get: {op}")
return cls
def create_dependent_item(source_items):
diff --git a/src/pydolphinscheduler/resources_plugin/github.py b/src/pydolphinscheduler/resources_plugin/github.py
index 369d377..f103d75 100644
--- a/src/pydolphinscheduler/resources_plugin/github.py
+++ b/src/pydolphinscheduler/resources_plugin/github.py
@@ -90,7 +90,7 @@ class GitHub(ResourcePlugin, Git):
"Content-Type": "application/json; charset=utf-8",
}
if self.access_token is not None:
- headers.setdefault("Authorization", "Bearer %s" %
self.access_token)
+ headers.setdefault("Authorization", f"Bearer {self.access_token}")
self.get_git_file_info(path)
response = requests.get(
headers=headers,
diff --git a/src/pydolphinscheduler/resources_plugin/gitlab.py b/src/pydolphinscheduler/resources_plugin/gitlab.py
index 323b399..d1ee80b 100644
--- a/src/pydolphinscheduler/resources_plugin/gitlab.py
+++ b/src/pydolphinscheduler/resources_plugin/gitlab.py
@@ -89,7 +89,7 @@ class GitLab(ResourcePlugin, Git):
"password": self.password,
}
host = self._git_file_info.host
- resp = requests.post("%s/oauth/token" % host, data=data)
+ resp = requests.post(f"{host}/oauth/token", data=data)
oauth_token = resp.json()["access_token"]
return oauth_token
diff --git a/tests/core/test_yaml_workflow.py b/tests/core/test_yaml_workflow.py
index 42999d0..083f822 100644
--- a/tests/core/test_yaml_workflow.py
+++ b/tests/core/test_yaml_workflow.py
@@ -52,7 +52,7 @@ def test_parse_tool_env_exist(string_param, expect):
def test_parse_tool_env_not_exist():
"""Test parsing the not exist environment variable."""
key = "THIS_ENV_NOT_EXIST_0000000"
- string_param = "$ENV{%s}" % key
+ string_param = f"$ENV{{{key}}}"
expect = "$" + key
assert expect == ParseTool.parse_string_param_if_env(string_param)
@@ -94,7 +94,7 @@ def test_parse_possible_yaml_file():
with open(path) as f:
expect = "".join(f)
- string_param = '$FILE{"%s"}' % file_name
+ string_param = f'$FILE{{"{file_name}"}}'
content_ = ParseTool.parse_string_param_if_file(string_param, base_folder=folder)
assert expect == content_