Replace assignment with augmented assignment (#10468)
kaxil committed Aug 22, 2020
1 parent 44a36b9 commit 7c206a8
Showing 12 changed files with 16 additions and 16 deletions.
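
Every hunk below applies the same mechanical rewrite: Python's augmented assignment (x += y) replaces the spelled-out form (x = x + y). A minimal sketch of the pattern, using illustrative names rather than ones taken from the diff:

    # Before: evaluate the right-hand side, then rebind the name.
    iterations = 10
    iterations = iterations - 1

    # After: augmented assignment; same result for immutable types such as ints and strings.
    iterations -= 1

For immutable values the two spellings are equivalent; for mutable containers there is a subtle difference, noted after the relevant hunks below.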
2 changes: 1 addition & 1 deletion airflow/providers/amazon/aws/hooks/datasync.py
@@ -303,7 +303,7 @@ def wait_for_task_execution(self, task_execution_arn, max_iterations=2 * 180):
             )
             status = task_execution["Status"]
             self.log.info("status=%s", status)
-            iterations = iterations - 1
+            iterations -= 1
             if status in self.TASK_EXECUTION_FAILURE_STATES:
                 break
             if status in self.TASK_EXECUTION_SUCCESS_STATES:
2 changes: 1 addition & 1 deletion airflow/providers/amazon/aws/hooks/logs.py
@@ -84,7 +84,7 @@ def get_log_events(self, log_group, log_stream_name, start_time=0, skip=0, start
                 events = events[skip:]
                 skip = 0
             else:
-                skip = skip - event_count
+                skip -= event_count
                 events = []
 
             yield from events
4 changes: 2 additions & 2 deletions airflow/providers/amazon/aws/hooks/sagemaker.py
@@ -672,7 +672,7 @@ def check_status(self, job_name, key,
 
         while running:
             time.sleep(check_interval)
-            sec = sec + check_interval
+            sec += check_interval
 
             try:
                 response = describe_function(job_name)
@@ -761,7 +761,7 @@ def check_training_status_with_log(self, job_name, non_terminal_states, failed_s
 
         while True:
             time.sleep(check_interval)
-            sec = sec + check_interval
+            sec += check_interval
 
             state, last_description, last_describe_job_call = \
                 self.describe_training_job_with_log(job_name, positions, stream_names,
2 changes: 1 addition & 1 deletion airflow/providers/apache/druid/hooks/druid.py
@@ -120,7 +120,7 @@ def submit_indexing_job(self, json_index_spec: Dict[str, Any]) -> None:
 
             time.sleep(self.timeout)
 
-            sec = sec + self.timeout
+            sec += self.timeout
 
             status = req_status.json()['status']['status']
             if status == 'RUNNING':
2 changes: 1 addition & 1 deletion airflow/providers/apache/hive/hooks/hive.py
@@ -211,7 +211,7 @@ def run_cli(self,
 
         with TemporaryDirectory(prefix='airflow_hiveop_') as tmp_dir:
             with NamedTemporaryFile(dir=tmp_dir) as f:
-                hql = hql + '\n'
+                hql += '\n'
                 f.write(hql.encode('UTF-8'))
                 f.flush()
                 hive_cmd = self._prepare_cli_cmd()
2 changes: 1 addition & 1 deletion airflow/providers/apache/spark/hooks/spark_submit.py
@@ -587,7 +587,7 @@ def _start_driver_status_tracking(self) -> None:
 
             if returncode:
                 if missed_job_status_reports < max_missed_job_status_reports:
-                    missed_job_status_reports = missed_job_status_reports + 1
+                    missed_job_status_reports += 1
                 else:
                     raise AirflowException(
                         "Failed to poll for the driver status {} times: returncode = {}"
4 changes: 2 additions & 2 deletions airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py
@@ -317,11 +317,11 @@ def handle_pod_overlap(self, labels, try_numbers_match, launcher, pod_list):
         log_line = "found a running pod with labels {} but a different try_number.".format(labels)
 
         if self.reattach_on_restart:
-            log_line = log_line + " Will attach to this pod and monitor instead of starting new one"
+            log_line += " Will attach to this pod and monitor instead of starting new one"
             self.log.info(log_line)
             final_state, result = self.monitor_launched_pod(launcher, pod_list.items[0])
         else:
-            log_line = log_line + "creating pod with labels {} and launcher {}".format(labels, launcher)
+            log_line += "creating pod with labels {} and launcher {}".format(labels, launcher)
             self.log.info(log_line)
             final_state, _, result = self.create_new_pod_for_operator(labels, launcher)
         return final_state, result
4 changes: 2 additions & 2 deletions airflow/providers/google/cloud/hooks/bigquery.py
@@ -1165,7 +1165,7 @@ def run_grant_dataset_view_access(
                 'Granting table %s:%s.%s authorized view access to %s:%s dataset.',
                 view_project, view_dataset, view_table, project_id, source_dataset
             )
-            dataset.access_entries = dataset.access_entries + [view_access]
+            dataset.access_entries += [view_access]
             dataset = self.update_dataset(
                 fields=["access"],
                 dataset_resource=dataset.to_api_repr(),
@@ -1423,7 +1423,7 @@ def cancel_job(
 
         job_complete = False
         while polling_attempts < max_polling_attempts and not job_complete:
-            polling_attempts = polling_attempts + 1
+            polling_attempts += 1
            job_complete = self.poll_job_complete(job_id)
            if job_complete:
                self.log.info('Job successfully canceled: %s, %s', project_id, job_id)
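
A note on the access_entries change above: the target is a property, and augmented assignment still runs the setter (the expansion is getter, then list.__iadd__, then setter), so the observable behavior matches the old spelling. A minimal sketch with an illustrative stand-in class, not the google-cloud-bigquery API:

    class Dataset:
        # Illustrative stand-in, not the real BigQuery Dataset class.
        def __init__(self):
            self._entries = []

        @property
        def access_entries(self):
            return self._entries

        @access_entries.setter
        def access_entries(self, value):
            self._entries = value

    d = Dataset()
    d.access_entries += ["view_access"]  # getter, in-place extend, then setter runs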
@@ -61,7 +61,7 @@ def grab_artifact_from_jenkins(**context):
     # The JenkinsJobTriggerOperator store the job url in the xcom variable corresponding to the task
     # You can then use it to access things or to get the job number
     # This url looks like : http://jenkins_url/job/job_name/job_number/
-    url = url + "artifact/myartifact.xml"  # Or any other artifact name
+    url += "artifact/myartifact.xml"  # Or any other artifact name
     request = Request(url)
     response = jenkins_server.jenkins_open(request)
     return response  # We store the artifact content in a xcom variable for later use
2 changes: 1 addition & 1 deletion airflow/providers/jenkins/operators/jenkins_job_trigger.py
@@ -160,7 +160,7 @@ def poll_job_in_queue(self, location: str, jenkins_server: Jenkins) -> int:
         :return: The build_number corresponding to the triggered job
         """
         try_count = 0
-        location = location + '/api/json'
+        location += '/api/json'
         # TODO Use get_queue_info instead
         # once it will be available in python-jenkins (v > 0.4.15)
         self.log.info('Polling jenkins queue at the url %s', location)
2 changes: 1 addition & 1 deletion airflow/providers/oracle/transfers/oracle_to_oracle.py
@@ -75,7 +75,7 @@ def _execute(self, src_hook, dest_hook, context):
         rows_total = 0
         rows = cursor.fetchmany(self.rows_chunk)
         while len(rows) > 0:
-            rows_total = rows_total + len(rows)
+            rows_total += len(rows)
             dest_hook.bulk_insert_rows(self.destination_table, rows,
                                        target_fields=target_fields,
                                        commit_every=self.rows_chunk)
4 changes: 2 additions & 2 deletions airflow/providers/singularity/operators/singularity.py
@@ -118,11 +118,11 @@ def execute(self, context):
 
         # Prepare list of binds
         for bind in self.volumes:
-            self.options = self.options + ['--bind', bind]
+            self.options += ['--bind', bind]
 
         # Does the user want a custom working directory?
         if self.working_dir is not None:
-            self.options = self.options + ['--workdir', self.working_dir]
+            self.options += ['--workdir', self.working_dir]
 
         # Export environment before instance is run
         for enkey, envar in self.environment.items():
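
A caveat for the two list-valued changes above: on a list, += mutates the existing object (an in-place extend via list.__iadd__), whereas options = options + [...] builds a new list and rebinds the name. The result only differs when the list is shared, as this sketch with illustrative names shows:

    default = []

    options = default
    options = options + ['--bind', '/tmp']   # new list; default is untouched
    assert default == []

    options = default
    options += ['--bind', '/tmp']            # in-place extend; default sees the change
    assert default == ['--bind', '/tmp']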
