Inclusive Language (#18349)

prakshalj0512 committed Sep 18, 2021
1 parent e81f14b commit e25eea0

Showing 23 changed files with 34 additions and 34 deletions.
2 changes: 1 addition & 1 deletion COMMITTERS.rst
@@ -158,7 +158,7 @@ can become the Mentor and guide the proposed candidates on how they can become a

 If the committee does not have enough information, requires more time, or requires more evidence of
 candidate's eligibility, a mentor, who is not the proposer, is selected to help mentor the candidate
-The mentor should try to remain impartial -- his/her goal is to provide the missing evidence and to
+The mentor should try to remain impartial -- their goal is to provide the missing evidence and to
 try to coach/mentor the candidate to success.

 In order to re-raise a candidate vote, both Proposer and Mentor must be in favor. Again,
2 changes: 1 addition & 1 deletion airflow/models/serialized_dag.py
@@ -160,7 +160,7 @@ def read_all_dags(cls, session: Session = None) -> Dict[str, 'SerializedDAG']:
             log.debug("Deserializing DAG: %s", row.dag_id)
             dag = row.dag

-            # Sanity check.
+            # Coherence check
             if dag.dag_id == row.dag_id:
                 dags[row.dag_id] = dag
             else:
@@ -44,7 +44,7 @@
     'Instances': {
         'InstanceGroups': [
             {
-                'Name': 'Master node',
+                'Name': 'Primary node',
                 'Market': 'SPOT',
                 'InstanceRole': 'MASTER',
                 'InstanceType': 'm1.medium',
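Only the free-form 'Name' label changes here; 'InstanceRole' is an enumerated value in the EMR API (MASTER, CORE, TASK) and has to stay as-is. A minimal sketch of a job-flow config using the renamed label, assuming the dict shape shown above (the job-flow name and instance count are illustrative):

    # Illustrative EMR job-flow config: 'Name' is a free-form display label,
    # while 'InstanceRole' is the EMR API constant and stays unchanged.
    JOB_FLOW_OVERRIDES = {
        'Name': 'ExampleJobFlow',  # hypothetical job-flow name
        'Instances': {
            'InstanceGroups': [
                {
                    'Name': 'Primary node',    # renamed display label
                    'Market': 'SPOT',
                    'InstanceRole': 'MASTER',  # API enum, must remain MASTER
                    'InstanceType': 'm1.medium',
                    'InstanceCount': 1,
                },
            ],
        },
    }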
@@ -48,7 +48,7 @@
     'Instances': {
         'InstanceGroups': [
             {
-                'Name': 'Master node',
+                'Name': 'Primary node',
                 'Market': 'SPOT',
                 'InstanceRole': 'MASTER',
                 'InstanceType': 'm1.medium',
2 changes: 1 addition & 1 deletion airflow/providers/google/cloud/hooks/cloud_memorystore.py
@@ -291,7 +291,7 @@ def failover_instance(
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
     ):
         """
-        Initiates a failover of the master node to current replica node for a specific STANDARD tier Cloud
+        Initiates a failover of the primary node to current replica node for a specific STANDARD tier Cloud
         Memorystore for Redis instance.

         :param location: The location of the Cloud Memorystore instance (for example europe-west1)
@@ -315,7 +315,7 @@ def execute(self, context: dict) -> None:

 class CloudMemorystoreFailoverInstanceOperator(BaseOperator):
     """
-    Initiates a failover of the master node to current replica node for a specific STANDARD tier Cloud
+    Initiates a failover of the primary node to current replica node for a specific STANDARD tier Cloud
     Memorystore for Redis instance.

     .. seealso::
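A hedged usage sketch of the operator documented above; the location, instance, and project values are placeholders, and the data-protection mode import assumes the google-cloud-redis client library used by this provider:

    from airflow.providers.google.cloud.operators.cloud_memorystore import (
        CloudMemorystoreFailoverInstanceOperator,
    )
    from google.cloud.redis_v1 import FailoverInstanceRequest

    failover = CloudMemorystoreFailoverInstanceOperator(
        task_id="failover-instance",
        location="europe-west1",      # region of the instance (placeholder)
        instance="my-memorystore",    # instance ID (placeholder)
        project_id="my-gcp-project",  # placeholder
        # Fail over without waiting for a full sync, accepting limited data loss.
        data_protection_mode=FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS,
    )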
6 changes: 3 additions & 3 deletions airflow/providers/google/cloud/operators/dataproc.py
@@ -135,14 +135,14 @@ class ClusterGenerator:
     :type optional_components: list[str]
     :param num_masters: The # of master nodes to spin up
     :type num_masters: int
-    :param master_machine_type: Compute engine machine type to use for the master node
+    :param master_machine_type: Compute engine machine type to use for the primary node
     :type master_machine_type: str
-    :param master_disk_type: Type of the boot disk for the master node
+    :param master_disk_type: Type of the boot disk for the primary node
         (default is ``pd-standard``).
         Valid values: ``pd-ssd`` (Persistent Disk Solid State Drive) or
         ``pd-standard`` (Persistent Disk Hard Disk Drive).
     :type master_disk_type: str
-    :param master_disk_size: Disk size for the master node
+    :param master_disk_size: Disk size for the primary node
     :type master_disk_size: int
     :param worker_machine_type: Compute engine machine type to use for the worker nodes
     :type worker_machine_type: str
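For context, a sketch of how the parameters documented above are typically passed, assuming ClusterGenerator's make() method, which renders the generated cluster config dict; the project ID is a placeholder:

    from airflow.providers.google.cloud.operators.dataproc import ClusterGenerator

    cluster_config = ClusterGenerator(
        project_id="my-gcp-project",          # placeholder
        num_masters=1,
        master_machine_type="n1-standard-4",  # machine type for the primary node
        master_disk_type="pd-standard",
        master_disk_size=1024,
        num_workers=2,
        worker_machine_type="n1-standard-4",
    ).make()  # dict consumable by a Dataproc create-cluster operator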
2 changes: 1 addition & 1 deletion airflow/providers/yandex/operators/yandexcloud_dataproc.py
@@ -49,7 +49,7 @@ class DataprocCreateClusterOperator(BaseOperator):
         Service account can be created inside the folder.
     :type service_account_id: Optional[str]
     :param masternode_resource_preset: Resources preset (CPU+RAM configuration)
-        for the master node of the cluster.
+        for the primary node of the cluster.
     :type masternode_resource_preset: str
     :param masternode_disk_size: Masternode storage size in GiB.
     :type masternode_disk_size: int
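An abridged sketch of constructing this operator with only the parameters named in this hunk; the preset value is a placeholder, other arguments fall back to the operator's defaults, and actually running it requires a configured Yandex.Cloud connection:

    from airflow.providers.yandex.operators.yandexcloud_dataproc import (
        DataprocCreateClusterOperator,
    )

    create_cluster = DataprocCreateClusterOperator(
        task_id="create-dataproc-cluster",
        masternode_resource_preset="s2.small",  # CPU+RAM preset for the primary node (placeholder)
        masternode_disk_size=20,                # GiB
    )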
4 changes: 2 additions & 2 deletions airflow/providers_manager.py
@@ -107,7 +107,7 @@ def _create_customized_form_field_behaviours_schema_validator():

 def _sanity_check(provider_package: str, class_name: str) -> bool:
     """
-    Performs sanity check on provider classes.
+    Performs coherence check on provider classes.

     For apache-airflow providers - it checks if it starts with appropriate package. For all providers
     it tries to import the provider - checking that there are no exceptions during importing.
     It logs appropriate warning in case it detects any problems.
@@ -121,7 +121,7 @@ def _sanity_check(provider_package: str, class_name: str) -> bool:
         provider_path = provider_package[len("apache-") :].replace("-", ".")
         if not class_name.startswith(provider_path):
             log.warning(
-                "Sanity check failed when importing '%s' from '%s' package. It should start with '%s'",
+                "Coherence check failed when importing '%s' from '%s' package. It should start with '%s'",
                 class_name,
                 provider_package,
                 provider_path,
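To make the prefix rule concrete, a small worked example; the package and class names are real Airflow identifiers, used here purely for illustration:

    provider_package = "apache-airflow-providers-google"
    provider_path = provider_package[len("apache-"):].replace("-", ".")
    # provider_path is now "airflow.providers.google"
    class_name = "airflow.providers.google.cloud.hooks.gcs.GCSHook"
    assert class_name.startswith(provider_path)  # passes the coherence check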
2 changes: 1 addition & 1 deletion airflow/utils/db.py
@@ -231,7 +231,7 @@ def create_default_connections(session=None):
                     "InstanceCount": 1
                 },
                 {
-                    "Name": "Slave nodes",
+                    "Name": "Core nodes",
                     "Market": "ON_DEMAND",
                     "InstanceRole": "CORE",
                     "InstanceType": "r3.2xlarge",
12 changes: 6 additions & 6 deletions airflow/utils/process_utils.py
@@ -159,19 +159,19 @@ def execute_interactive(cmd: List[str], **kwargs):
     tty.setraw(sys.stdin.fileno())

     # open pseudo-terminal to interact with subprocess
-    master_fd, slave_fd = pty.openpty()
+    primary_fd, secondary_fd = pty.openpty()
     try:
         # use os.setsid() make it run in a new process group, or bash job control will not be enabled
         with subprocess.Popen(
-            cmd, stdin=slave_fd, stdout=slave_fd, stderr=slave_fd, universal_newlines=True, **kwargs
+            cmd, stdin=secondary_fd, stdout=secondary_fd, stderr=secondary_fd, universal_newlines=True, **kwargs
         ) as proc:
             while proc.poll() is None:
-                readable_fbs, _, _ = select.select([sys.stdin, master_fd], [], [])
+                readable_fbs, _, _ = select.select([sys.stdin, primary_fd], [], [])
                 if sys.stdin in readable_fbs:
                     input_data = os.read(sys.stdin.fileno(), 10240)
-                    os.write(master_fd, input_data)
-                if master_fd in readable_fbs:
-                    output_data = os.read(master_fd, 10240)
+                    os.write(primary_fd, input_data)
+                if primary_fd in readable_fbs:
+                    output_data = os.read(primary_fd, 10240)
                     if output_data:
                         os.write(sys.stdout.fileno(), output_data)
     finally:
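For reference, a minimal usage sketch of the helper being renamed; the command is a placeholder, and the function proxies the terminal to the child process through the pseudo-terminal pair above until the child exits:

    from airflow.utils.process_utils import execute_interactive

    execute_interactive(["bash", "--noprofile", "--norc"])  # placeholder command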
@@ -105,7 +105,7 @@ The list currently includes Spark, Hadoop, Pig and Hive.

 For more information on versions and images take a look at `Cloud Dataproc Image version list <https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions>`__

 To submit a job to the cluster you need a provide a job source file. The job source file can be on GCS, the cluster or on your local
-file system. You can specify a file:/// path to refer to a local file on a cluster's master node.
+file system. You can specify a file:/// path to refer to a local file on a cluster's primary node.

 The job configuration can be submitted by using:
 :class:`~airflow.providers.google.cloud.operators.dataproc.DataprocSubmitJobOperator`.
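A hedged sketch of submitting a PySpark job with that operator; the project, cluster, and region values are placeholders, and the job dict follows the Dataproc Job shape with a file:/// URI pointing at a script on the cluster's primary node:

    from airflow.providers.google.cloud.operators.dataproc import DataprocSubmitJobOperator

    pyspark_job = {
        "reference": {"project_id": "my-gcp-project"},  # placeholder
        "placement": {"cluster_name": "my-cluster"},    # placeholder
        "pyspark_job": {
            # file:/// refers to a local file on the cluster's primary node
            "main_python_file_uri": "file:///usr/lib/spark/examples/src/main/python/pi.py"
        },
    }

    submit_job = DataprocSubmitJobOperator(
        task_id="submit-pyspark-job",
        job=pyspark_job,
        region="europe-west1",        # placeholder
        project_id="my-gcp-project",  # placeholder
    )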
2 changes: 1 addition & 1 deletion scripts/docker/install_airflow.sh
@@ -30,7 +30,7 @@

 . "$( dirname "${BASH_SOURCE[0]}" )/common.sh"

 function install_airflow() {
-    # Sanity check for editable installation mode.
+    # Coherence check for editable installation mode.
     if [[ ${AIRFLOW_INSTALLATION_METHOD} != "." && \
         ${AIRFLOW_INSTALL_EDITABLE_FLAG} == "--editable" ]]; then
         echo
2 changes: 1 addition & 1 deletion scripts/in_container/bin/install_aws.sh
@@ -58,7 +58,7 @@ pushd "${TMP_DIR}" && unzip "${TMP_DIR}/awscliv2.zip" && cd aws && \
     --bin-dir "/files/bin/" && \
     popd

-# Sanity check
+# Coherence check
 if ! command -v aws > /dev/null; then
     echo 'Installation failed. The command "aws" was not found.'
     exit 1
2 changes: 1 addition & 1 deletion scripts/in_container/bin/install_az.sh
@@ -61,7 +61,7 @@ chmod a+x /files/opt/az/az

 ln -s /files/opt/az/az "${BIN_PATH}"

-# Sanity check
+# Coherence check
 if ! command -v az > /dev/null; then
     echo 'Installation failed. The command "az" was not found.'
     exit 1
2 changes: 1 addition & 1 deletion scripts/in_container/bin/install_gcloud.sh
@@ -67,7 +67,7 @@ while IPS='' read -r line; do
     ln -sf "${line}" "/files/bin/${BIN_NAME}"
 done < <(find "${INSTALL_DIR}/bin/" -type f)

-# Sanity check
+# Coherence check
 if ! command -v gcloud > /dev/null; then
     echo 'Installation failed. The command "gcloud" was not found.'
     exit 1
2 changes: 1 addition & 1 deletion scripts/in_container/bin/install_imgcat.sh
@@ -38,7 +38,7 @@ echo "Downloading from ${DOWNLOAD_URL}"
 curl -# --fail "${DOWNLOAD_URL}" --output "${BIN_PATH}"
 chmod +x "${BIN_PATH}"

-# Sanity check
+# Coherence check
 if ! command -v imgcat > /dev/null; then
     echo 'Installation failed. The command "imgcat" was not found.'
     exit 1
2 changes: 1 addition & 1 deletion scripts/in_container/bin/install_java.sh
@@ -61,7 +61,7 @@ while IPS='' read -r line; do
     ln -s "${line}" "/files/bin/${BIN_NAME}"
 done < <(find "${INSTALL_DIR}/bin/" -type f)

-# Sanity check
+# Coherence check
 if ! command -v java > /dev/null; then
     echo 'Installation failed. The command "java" was not found.'
     exit 1
2 changes: 1 addition & 1 deletion scripts/in_container/bin/install_kubectl.sh
@@ -46,7 +46,7 @@ echo "Downloading from ${DOWNLOAD_URL}"
 curl -# --fail "${DOWNLOAD_URL}" --output "${BIN_PATH}"
 chmod +x "${BIN_PATH}"

-# Sanity check
+# Coherence check
 if ! command -v kubectl > /dev/null; then
     echo 'Installation failed. The command "kubectl" was not found.'
     exit 1
2 changes: 1 addition & 1 deletion scripts/in_container/bin/install_terraform.sh
@@ -47,7 +47,7 @@ curl -# --fail "${DOWNLOAD_URL}" --output "${TMP_DIR}/terraform.zip"
 echo "Extracting archive"
 unzip "${TMP_DIR}/terraform.zip" -d /files/bin

-# Sanity check
+# Coherence check
 if ! command -v terraform > /dev/null; then
     echo 'Installation failed. The command "terraform" was not found.'
     exit 1
2 changes: 1 addition & 1 deletion tests/core/test_providers_manager.py
@@ -42,7 +42,7 @@ def test_providers_are_loaded(self):
             version = provider_manager.providers[provider][0]
             assert re.search(r'[0-9]*\.[0-9]*\.[0-9]*.*', version)
             assert package_name == provider
-        # just a sanity check - no exact number as otherwise we would have to update
+        # just a coherence check - no exact number as otherwise we would have to update
         # several tests if we add new connections/provider which is not ideal
         assert len(provider_list) > 65
         assert [] == self._caplog.records
8 changes: 4 additions & 4 deletions tests/models/test_dagbag.py
@@ -419,7 +419,7 @@ def subdag_1():
             return dag

         test_dag = standard_subdag()
-        # sanity check to make sure DAG.subdag is still functioning properly
+        # coherence check to make sure DAG.subdag is still functioning properly
         assert len(test_dag.subdags) == 2

         # Perform processing dag
@@ -503,7 +503,7 @@ def subdag_1():
             return dag

         test_dag = nested_subdags()
-        # sanity check to make sure DAG.subdag is still functioning properly
+        # coherence check to make sure DAG.subdag is still functioning properly
         assert len(test_dag.subdags) == 6

         # Perform processing dag
@@ -541,7 +541,7 @@ def basic_cycle():
             return dag

         test_dag = basic_cycle()
-        # sanity check to make sure DAG.subdag is still functioning properly
+        # coherence check to make sure DAG.subdag is still functioning properly
         assert len(test_dag.subdags) == 0

         # Perform processing dag
@@ -628,7 +628,7 @@ def subdag_1():
             return dag

         test_dag = nested_subdag_cycle()
-        # sanity check to make sure DAG.subdag is still functioning properly
+        # coherence check to make sure DAG.subdag is still functioning properly
         assert len(test_dag.subdags) == 6

         # Perform processing dag
2 changes: 1 addition & 1 deletion tests/models/test_dagrun.py
@@ -737,7 +737,7 @@ def test_next_dagruns_to_examine_only_unpaused(self, state):
     def test_no_scheduling_delay_for_nonscheduled_runs(self, stats_mock):
         """
         Tests that dag scheduling delay stat is not called if the dagrun is not a scheduled run.
-        This case is manual run. Simple test for sanity check.
+        This case is manual run. Simple test for coherence check.
         """
         dag = DAG(dag_id='test_dagrun_stats', start_date=days_ago(1))
         dag_task = DummyOperator(task_id='dummy', dag=dag)
