
Commit 7c206a8

Replace assignment with augmented assignment (#10468)
1 parent 44a36b9 · commit 7c206a8

12 files changed: 16 additions, 16 deletions

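Every change in this commit follows the same pattern: a statement of the form x = x + y is rewritten as the augmented assignment x += y. For immutable operands (the integers and strings in most of these hunks) the two spellings are equivalent; the augmented form simply avoids repeating the variable name. The list cases in bigquery.py and singularity.py carry a small semantic nuance, noted after their hunks below. A minimal Python sketch of the basic pattern, modeled on the sec counter from the sagemaker hook:

# Plain vs. augmented assignment on an int: identical results.
sec = 0
check_interval = 30

sec = sec + check_interval   # before this commit
assert sec == 30

sec += check_interval        # after this commit: same effect
assert sec == 60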

airflow/providers/amazon/aws/hooks/datasync.py

Lines changed: 1 addition & 1 deletion
@@ -303,7 +303,7 @@ def wait_for_task_execution(self, task_execution_arn, max_iterations=2 * 180):
             )
             status = task_execution["Status"]
             self.log.info("status=%s", status)
-            iterations = iterations - 1
+            iterations -= 1
             if status in self.TASK_EXECUTION_FAILURE_STATES:
                 break
             if status in self.TASK_EXECUTION_SUCCESS_STATES:

airflow/providers/amazon/aws/hooks/logs.py

Lines changed: 1 addition & 1 deletion
@@ -84,7 +84,7 @@ def get_log_events(self, log_group, log_stream_name, start_time=0, skip=0, start
                 events = events[skip:]
                 skip = 0
             else:
-                skip = skip - event_count
+                skip -= event_count
                 events = []
 
             yield from events

airflow/providers/amazon/aws/hooks/sagemaker.py

Lines changed: 2 additions & 2 deletions
@@ -672,7 +672,7 @@ def check_status(self, job_name, key,
 
         while running:
             time.sleep(check_interval)
-            sec = sec + check_interval
+            sec += check_interval
 
             try:
                 response = describe_function(job_name)
@@ -761,7 +761,7 @@ def check_training_status_with_log(self, job_name, non_terminal_states, failed_s
 
         while True:
             time.sleep(check_interval)
-            sec = sec + check_interval
+            sec += check_interval
 
             state, last_description, last_describe_job_call = \
                 self.describe_training_job_with_log(job_name, positions, stream_names,

airflow/providers/apache/druid/hooks/druid.py

Lines changed: 1 addition & 1 deletion
@@ -120,7 +120,7 @@ def submit_indexing_job(self, json_index_spec: Dict[str, Any]) -> None:
 
             time.sleep(self.timeout)
 
-            sec = sec + self.timeout
+            sec += self.timeout
 
             status = req_status.json()['status']['status']
             if status == 'RUNNING':

airflow/providers/apache/hive/hooks/hive.py

Lines changed: 1 addition & 1 deletion
@@ -211,7 +211,7 @@ def run_cli(self,
 
         with TemporaryDirectory(prefix='airflow_hiveop_') as tmp_dir:
             with NamedTemporaryFile(dir=tmp_dir) as f:
-                hql = hql + '\n'
+                hql += '\n'
                 f.write(hql.encode('UTF-8'))
                 f.flush()
                 hive_cmd = self._prepare_cli_cmd()

airflow/providers/apache/spark/hooks/spark_submit.py

Lines changed: 1 addition & 1 deletion
@@ -587,7 +587,7 @@ def _start_driver_status_tracking(self) -> None:
 
             if returncode:
                 if missed_job_status_reports < max_missed_job_status_reports:
-                    missed_job_status_reports = missed_job_status_reports + 1
+                    missed_job_status_reports += 1
                 else:
                     raise AirflowException(
                         "Failed to poll for the driver status {} times: returncode = {}"

airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py

Lines changed: 2 additions & 2 deletions
@@ -317,11 +317,11 @@ def handle_pod_overlap(self, labels, try_numbers_match, launcher, pod_list):
         log_line = "found a running pod with labels {} but a different try_number.".format(labels)
 
         if self.reattach_on_restart:
-            log_line = log_line + " Will attach to this pod and monitor instead of starting new one"
+            log_line += " Will attach to this pod and monitor instead of starting new one"
             self.log.info(log_line)
             final_state, result = self.monitor_launched_pod(launcher, pod_list.items[0])
         else:
-            log_line = log_line + "creating pod with labels {} and launcher {}".format(labels, launcher)
+            log_line += "creating pod with labels {} and launcher {}".format(labels, launcher)
             self.log.info(log_line)
             final_state, _, result = self.create_new_pod_for_operator(labels, launcher)
         return final_state, result

airflow/providers/google/cloud/hooks/bigquery.py

Lines changed: 2 additions & 2 deletions
@@ -1165,7 +1165,7 @@ def run_grant_dataset_view_access(
                 'Granting table %s:%s.%s authorized view access to %s:%s dataset.',
                 view_project, view_dataset, view_table, project_id, source_dataset
             )
-            dataset.access_entries = dataset.access_entries + [view_access]
+            dataset.access_entries += [view_access]
             dataset = self.update_dataset(
                 fields=["access"],
                 dataset_resource=dataset.to_api_repr(),
@@ -1423,7 +1423,7 @@ def cancel_job(
 
         job_complete = False
         while polling_attempts < max_polling_attempts and not job_complete:
-            polling_attempts = polling_attempts + 1
+            polling_attempts += 1
             job_complete = self.poll_job_complete(job_id)
             if job_complete:
                 self.log.info('Job successfully canceled: %s, %s', project_id, job_id)
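One subtlety in the dataset.access_entries change above: augmented assignment on an attribute still reads the attribute, performs the addition, and assigns the result back, so a property setter fires exactly as it did with the original spelling. A minimal sketch, using a hypothetical Dataset stand-in rather than the real google-cloud-bigquery class:

# "obj.attr += [x]" reads via the getter, then writes via the setter.
class Dataset:
    def __init__(self):
        self._entries = []

    @property
    def access_entries(self):
        return list(self._entries)  # getter hands out a copy

    @access_entries.setter
    def access_entries(self, value):
        self._entries = list(value)  # setter still runs under +=

dataset = Dataset()
dataset.access_entries += ['view_access']  # getter, list +=, then setter
assert dataset.access_entries == ['view_access']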

airflow/providers/jenkins/example_dags/example_jenkins_job_trigger.py

Lines changed: 1 addition & 1 deletion
@@ -61,7 +61,7 @@ def grab_artifact_from_jenkins(**context):
     # The JenkinsJobTriggerOperator store the job url in the xcom variable corresponding to the task
     # You can then use it to access things or to get the job number
     # This url looks like : http://jenkins_url/job/job_name/job_number/
-    url = url + "artifact/myartifact.xml"  # Or any other artifact name
+    url += "artifact/myartifact.xml"  # Or any other artifact name
     request = Request(url)
     response = jenkins_server.jenkins_open(request)
     return response  # We store the artifact content in a xcom variable for later use

airflow/providers/jenkins/operators/jenkins_job_trigger.py

Lines changed: 1 addition & 1 deletion
@@ -160,7 +160,7 @@ def poll_job_in_queue(self, location: str, jenkins_server: Jenkins) -> int:
         :return: The build_number corresponding to the triggered job
         """
         try_count = 0
-        location = location + '/api/json'
+        location += '/api/json'
         # TODO Use get_queue_info instead
         # once it will be available in python-jenkins (v > 0.4.15)
         self.log.info('Polling jenkins queue at the url %s', location)

airflow/providers/oracle/transfers/oracle_to_oracle.py

Lines changed: 1 addition & 1 deletion
@@ -75,7 +75,7 @@ def _execute(self, src_hook, dest_hook, context):
         rows_total = 0
         rows = cursor.fetchmany(self.rows_chunk)
         while len(rows) > 0:
-            rows_total = rows_total + len(rows)
+            rows_total += len(rows)
             dest_hook.bulk_insert_rows(self.destination_table, rows,
                                        target_fields=target_fields,
                                        commit_every=self.rows_chunk)

airflow/providers/singularity/operators/singularity.py

Lines changed: 2 additions & 2 deletions
@@ -118,11 +118,11 @@ def execute(self, context):
 
         # Prepare list of binds
         for bind in self.volumes:
-            self.options = self.options + ['--bind', bind]
+            self.options += ['--bind', bind]
 
         # Does the user want a custom working directory?
         if self.working_dir is not None:
-            self.options = self.options + ['--workdir', self.working_dir]
+            self.options += ['--workdir', self.working_dir]
 
         # Export environment before instance is run
        for enkey, envar in self.environment.items():
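The two changes above apply += to a list, which is the one place where the rewrite is not a pure spelling change: options = options + [...] builds a new list and rebinds the name, while options += [...] extends the existing list in place via list.__iadd__. For self.options the end state is the same, but the difference is observable when another name references the same list. A short illustration with hypothetical names:

# "a = a + b" rebinds the name; "a += b" mutates the list in place.
options = ['--foo']
alias = options                            # second reference to the list

options = options + ['--bind', '/data']   # new list; alias is untouched
assert alias == ['--foo']

options = ['--foo']
alias = options

options += ['--bind', '/data']            # in-place extend; alias sees it
assert alias == ['--foo', '--bind', '/data']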
