Fix spelling (apache#13130)
jbampton authored Dec 17, 2020
1 parent 8d5b434 commit 8529cb1
Showing 13 changed files with 35 additions and 35 deletions.
4 changes: 2 additions & 2 deletions Dockerfile

@@ -219,7 +219,7 @@ ENV ADDITIONAL_PYTHON_DEPS=${ADDITIONAL_PYTHON_DEPS}
ARG AIRFLOW_INSTALLATION_METHOD="apache-airflow"
ENV AIRFLOW_INSTALLATION_METHOD=${AIRFLOW_INSTALLATION_METHOD}

-# By default latest released version of airflow is installed (when empty) but this value can be overriden
+# By default latest released version of airflow is installed (when empty) but this value can be overridden
# and we can install specific version of airflow this way.
ARG AIRFLOW_INSTALL_VERSION=""
ENV AIRFLOW_INSTALL_VERSION=${AIRFLOW_INSTALL_VERSION}
@@ -413,7 +413,7 @@ RUN addgroup --gid "${AIRFLOW_GID}" "airflow" && \
ARG AIRFLOW_HOME
ENV AIRFLOW_HOME=${AIRFLOW_HOME}

-# Make Airflow files belong to the root group and are accessible. This is to accomodate the guidelines from
+# Make Airflow files belong to the root group and are accessible. This is to accommodate the guidelines from
# OpenShift https://docs.openshift.com/enterprise/3.0/creating_images/guidelines.html
RUN mkdir -pv "${AIRFLOW_HOME}"; \
mkdir -pv "${AIRFLOW_HOME}/dags"; \
2 changes: 1 addition & 1 deletion IMAGES.rst

@@ -624,7 +624,7 @@ The entrypoint performs those operations:

* Sets up Kerberos if Kerberos integration is enabled (generates and configures Kerberos token)

-* Sets up ssh keys for ssh tests and restarts teh SSH server
+* Sets up ssh keys for ssh tests and restarts the SSH server

* Sets all variables and configurations needed for unit tests to run

2 changes: 1 addition & 1 deletion UPDATING.md

@@ -1537,7 +1537,7 @@ Migrated are:

#### `airflow.providers.amazon.aws.operators.emr_terminate_job_flow.EmrTerminateJobFlowOperator`

-The default value for the [aws_conn_id](https://airflow.apache.org/howto/manage-connections.html#amazon-web-services) was accidently set to 's3_default' instead of 'aws_default' in some of the emr operators in previous
+The default value for the [aws_conn_id](https://airflow.apache.org/howto/manage-connections.html#amazon-web-services) was accidentally set to 's3_default' instead of 'aws_default' in some of the emr operators in previous
versions. This was leading to EmrStepSensor not being able to find their corresponding emr cluster. With the new
changes in the EmrAddStepsOperator, EmrTerminateJobFlowOperator and EmrCreateJobFlowOperator this issue is
solved.
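
For older, affected releases, a minimal workaround sketch is to pass the connection id explicitly; the DAG id and job-flow id below are illustrative, and the import path is the one named above:

```python
from airflow import DAG
from airflow.providers.amazon.aws.operators.emr_terminate_job_flow import (
    EmrTerminateJobFlowOperator,
)
from airflow.utils.dates import days_ago

with DAG(dag_id="emr_cleanup_example", start_date=days_ago(1), schedule_interval=None) as dag:
    # Passing aws_conn_id explicitly sidesteps the old 's3_default' default
    # on affected versions; on fixed versions it is simply redundant.
    terminate_job_flow = EmrTerminateJobFlowOperator(
        task_id="terminate_job_flow",
        job_flow_id="j-EXAMPLEID",  # illustrative cluster id
        aws_conn_id="aws_default",
    )
```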
4 changes: 2 additions & 2 deletions airflow/api_connexion/openapi/v1.yaml

@@ -1338,7 +1338,7 @@ paths:
get:
summary: Get a instance status
description: |
-Get the status of Airflow's metadatabase and scheduler. It incluse info about
+Get the status of Airflow's metadatabase and scheduler. It includes info about
metadatabase and last heartbeat of scheduler.
x-openapi-router-controller: airflow.api_connexion.endpoints.health_endpoint
operationId: get_health
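
A usage sketch of the endpoint this hunk documents, assuming a webserver on localhost:8080 and the stable REST API prefix /api/v1:

```python
import requests

# Query the health endpoint described above; host and port are assumptions.
resp = requests.get("http://localhost:8080/api/v1/health")
resp.raise_for_status()
health = resp.json()
# Per the description, the payload reports metadatabase status plus the
# scheduler's status and last heartbeat.
print(health["metadatabase"]["status"])
print(health["scheduler"]["status"])
```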
@@ -1883,7 +1883,7 @@ components:
$ref: '#/components/schemas/XComCollectionItem'

XCom:
-description: Full representaiton of XCom entry.
+description: Full representations of XCom entry.
allOf:
- $ref: '#/components/schemas/XComCollectionItem'
- type: object
2 changes: 1 addition & 1 deletion airflow/sentry.py

@@ -97,7 +97,7 @@ def __init__(self):
sentry_config_opts.pop("sentry_on")
old_way_dsn = sentry_config_opts.pop("sentry_dsn", None)
new_way_dsn = sentry_config_opts.pop("dsn", None)
-# supported backward compability with old way dsn option
+# supported backward compatibility with old way dsn option
dsn = old_way_dsn or new_way_dsn

unsupported_options = self.UNSUPPORTED_SENTRY_OPTIONS.intersection(sentry_config_opts.keys())
2 changes: 1 addition & 1 deletion airflow/www/templates/airflow/tree.html

@@ -143,7 +143,7 @@
}

var devicePixelRatio = window.devicePixelRatio || 1;
-// JSON.parse is faster for large payloads than an object literal (because the JSON grammer is simpler!)
+// JSON.parse is faster for large payloads than an object literal (because the JSON grammar is simpler!)
var data = JSON.parse({{ data|tojson }});
var barHeight = 20;
var axisHeight = 40;
2 changes: 1 addition & 1 deletion docs/apache-airflow/concepts.rst

@@ -316,7 +316,7 @@ Task ids are generated by appending a number at the end of the original task id.
the following task ids: ``[update_user, update_user__1, update_user__2, ... update_user__n]``.

Due to dynamic nature of the ids generations users should be aware that changing a DAG by adding or removing additional
-invocations of task-decorated function may change ``task_id`` of other task of the same type withing a single DAG.
+invocations of task-decorated function may change ``task_id`` of other task of the same type within a single DAG.

For example, if there are many task-decorated tasks without explicitly given task_id. Their ``task_id`` will be
generated sequentially: ``task__1``, ``task__2``, ``task__3``, etc. After the DAG goes into production, one day
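
A minimal sketch of that id-generation behaviour, assuming Airflow 2.0's TaskFlow decorators (function and DAG names are illustrative):

```python
from airflow.decorators import dag, task
from airflow.utils.dates import days_ago

@task
def update_user(user_id: str):
    print(f"updating {user_id}")

@dag(start_date=days_ago(1), schedule_interval=None)
def user_sync():
    # Three calls of one decorated function get the ids update_user,
    # update_user__1, update_user__2; adding or removing a call shifts
    # the suffix of every later invocation.
    for user_id in ["alice", "bob", "carol"]:
        update_user(user_id)

user_sync_dag = user_sync()
```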
2 changes: 1 addition & 1 deletion scripts/ci/libraries/_build_images.sh

@@ -141,7 +141,7 @@ function build_images::confirm_image_rebuild() {
fi
if [[ -f "${LAST_FORCE_ANSWER_FILE}" ]]; then
# set variable from last answered response given in the same pre-commit run - so that it can be
-# answered in teh first pre-commit check (build) and then used in another (pylint/mypy/flake8 etc).
+# answered in the first pre-commit check (build) and then used in another (pylint/mypy/flake8 etc).
# shellcheck disable=SC1090
source "${LAST_FORCE_ANSWER_FILE}"
fi
2 changes: 1 addition & 1 deletion setup.py

@@ -516,7 +516,7 @@ def write_version(filename: str = os.path.join(*[my_dir, "airflow", "git_version"

############################################################################################################
# IMPORTANT NOTE!!!!!!!!!!!!!!!
-# If you have a 'pip check' problem with dependencies, it might be becasue some dependency has been
+# If you have a 'pip check' problem with dependencies, it might be because some dependency has been
# installed via 'install_requires' in setup.cfg in higher version than required in one of the options below.
# For example pip check was failing with requests=2.25.1 installed even if in some dependencies below
# < 2.24.0 was specified for it. Solution in such case is to add such limiting requirement to
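
A hypothetical illustration of such a limiting pin (package names and versions are made up, not Airflow's actual requirements):

```python
from setuptools import setup

setup(
    name="example-package",
    version="0.1.0",
    # Mirror the strictest cap from the extras in the core requirements so
    # that the resolved install can never violate an extra's bound and
    # 'pip check' stays consistent.
    install_requires=["requests<2.24.0"],
    extras_require={"emr": ["boto3", "requests<2.24.0"]},
)
```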
2 changes: 1 addition & 1 deletion tests/core/test_configuration.py

@@ -384,7 +384,7 @@ def test_getsection(self):

self.assertEqual(
None,
-test_conf.getsection('non_existant_secion'),
+test_conf.getsection('non_existent_section'),
)

def test_get_section_should_respect_cmd_env_variable(self):
2 changes: 1 addition & 1 deletion tests/lineage/test_lineage.py

@@ -93,7 +93,7 @@ def test_lineage(self):

def test_lineage_render(self):
# tests inlets / outlets are rendered if they are added
-# after initalization
+# after initialization
dag = DAG(dag_id='test_lineage_render', start_date=DEFAULT_DATE)

with dag:
2 changes: 1 addition & 1 deletion tests/models/test_taskinstance.py

@@ -414,7 +414,7 @@ def create_task_instance():
@provide_session
def test_ti_updates_with_task(self, session=None):
"""
-test that updating the executor_config propogates to the TaskInstance DB
+test that updating the executor_config propagates to the TaskInstance DB
"""
with models.DAG(dag_id='test_run_pooling_task') as dag:
task = DummyOperator(
42 changes: 21 additions & 21 deletions tests/providers/google/suite/hooks/test_sheets.py

@@ -28,7 +28,7 @@
from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id

GCP_CONN_ID = 'test'
-SPREADHSEET_ID = '1234567890'
+SPREADSHEET_ID = '1234567890'
RANGE_ = 'test!A:E'
RANGES = ['test!A:Q', 'test!R:Z']
VALUES = [[1, 2, 3]]
@@ -66,7 +66,7 @@ def test_get_values(self, get_conn):
execute_method = get_method.return_value.execute
execute_method.return_value = {"values": VALUES}
result = self.hook.get_values(
-spreadsheet_id=SPREADHSEET_ID,
+spreadsheet_id=SPREADSHEET_ID,
range_=RANGE_,
major_dimension=MAJOR_DIMENSION,
value_render_option=VALUE_RENDER_OPTION,
@@ -75,7 +75,7 @@ def test_get_values(self, get_conn):
self.assertIs(result, VALUES)
execute_method.assert_called_once_with(num_retries=NUM_RETRIES)
get_method.assert_called_once_with(
-spreadsheetId=SPREADHSEET_ID,
+spreadsheetId=SPREADSHEET_ID,
range=RANGE_,
majorDimension=MAJOR_DIMENSION,
valueRenderOption=VALUE_RENDER_OPTION,
@@ -88,7 +88,7 @@ def test_batch_get_values(self, get_conn):
execute_method = batch_get_method.return_value.execute
execute_method.return_value = API_RESPONSE
result = self.hook.batch_get_values(
-spreadsheet_id=SPREADHSEET_ID,
+spreadsheet_id=SPREADSHEET_ID,
ranges=RANGES,
major_dimension=MAJOR_DIMENSION,
value_render_option=VALUE_RENDER_OPTION,
@@ -97,7 +97,7 @@ def test_batch_get_values(self, get_conn):
self.assertIs(result, API_RESPONSE)
execute_method.assert_called_once_with(num_retries=NUM_RETRIES)
batch_get_method.assert_called_once_with(
-spreadsheetId=SPREADHSEET_ID,
+spreadsheetId=SPREADSHEET_ID,
ranges=RANGES,
majorDimension=MAJOR_DIMENSION,
valueRenderOption=VALUE_RENDER_OPTION,
@@ -110,7 +110,7 @@ def test_update_values(self, get_conn):
execute_method = update_method.return_value.execute
execute_method.return_value = API_RESPONSE
result = self.hook.update_values(
-spreadsheet_id=SPREADHSEET_ID,
+spreadsheet_id=SPREADSHEET_ID,
range_=RANGE_,
values=VALUES,
major_dimension=MAJOR_DIMENSION,
@@ -123,7 +123,7 @@ def test_update_values(self, get_conn):
self.assertIs(result, API_RESPONSE)
execute_method.assert_called_once_with(num_retries=NUM_RETRIES)
update_method.assert_called_once_with(
-spreadsheetId=SPREADHSEET_ID,
+spreadsheetId=SPREADSHEET_ID,
range=RANGE_,
valueInputOption=VALUE_INPUT_OPTION,
includeValuesInResponse=INCLUDE_VALUES_IN_RESPONSE,
@@ -138,7 +138,7 @@ def test_batch_update_values(self, get_conn):
execute_method = batch_update_method.return_value.execute
execute_method.return_value = API_RESPONSE
result = self.hook.batch_update_values(
-spreadsheet_id=SPREADHSEET_ID,
+spreadsheet_id=SPREADSHEET_ID,
ranges=RANGES,
values=VALUES_BATCH,
major_dimension=MAJOR_DIMENSION,
@@ -160,7 +160,7 @@ def test_batch_update_values(self, get_conn):
}
self.assertIs(result, API_RESPONSE)
execute_method.assert_called_once_with(num_retries=NUM_RETRIES)
-batch_update_method.assert_called_once_with(spreadsheetId=SPREADHSEET_ID, body=body)
+batch_update_method.assert_called_once_with(spreadsheetId=SPREADSHEET_ID, body=body)

@mock.patch("airflow.providers.google.suite.hooks.sheets.GSheetsHook.get_conn")
def test_batch_update_values_with_bad_data(self, get_conn):
@@ -169,7 +169,7 @@ def test_batch_update_values_with_bad_data(self, get_conn):
execute_method.return_value = API_RESPONSE
with self.assertRaises(AirflowException) as cm:
self.hook.batch_update_values(
-spreadsheet_id=SPREADHSEET_ID,
+spreadsheet_id=SPREADSHEET_ID,
ranges=['test!A1:B2', 'test!C1:C2'],
values=[[1, 2, 3]], # bad data
major_dimension=MAJOR_DIMENSION,
@@ -189,7 +189,7 @@ def test_append_values(self, get_conn):
execute_method = append_method.return_value.execute
execute_method.return_value = API_RESPONSE
result = self.hook.append_values(
-spreadsheet_id=SPREADHSEET_ID,
+spreadsheet_id=SPREADSHEET_ID,
range_=RANGE_,
values=VALUES,
major_dimension=MAJOR_DIMENSION,
@@ -203,7 +203,7 @@ def test_append_values(self, get_conn):
self.assertIs(result, API_RESPONSE)
execute_method.assert_called_once_with(num_retries=NUM_RETRIES)
append_method.assert_called_once_with(
-spreadsheetId=SPREADHSEET_ID,
+spreadsheetId=SPREADSHEET_ID,
range=RANGE_,
valueInputOption=VALUE_INPUT_OPTION,
insertDataOption=INSERT_DATA_OPTION,
@@ -218,31 +218,31 @@ def test_clear_values(self, get_conn):
clear_method = get_conn.return_value.spreadsheets.return_value.values.return_value.clear
execute_method = clear_method.return_value.execute
execute_method.return_value = API_RESPONSE
-result = self.hook.clear(spreadsheet_id=SPREADHSEET_ID, range_=RANGE_)
+result = self.hook.clear(spreadsheet_id=SPREADSHEET_ID, range_=RANGE_)

self.assertIs(result, API_RESPONSE)
execute_method.assert_called_once_with(num_retries=NUM_RETRIES)
-clear_method.assert_called_once_with(spreadsheetId=SPREADHSEET_ID, range=RANGE_)
+clear_method.assert_called_once_with(spreadsheetId=SPREADSHEET_ID, range=RANGE_)

@mock.patch("airflow.providers.google.suite.hooks.sheets.GSheetsHook.get_conn")
def test_batch_clear_values(self, get_conn):
batch_clear_method = get_conn.return_value.spreadsheets.return_value.values.return_value.batchClear
execute_method = batch_clear_method.return_value.execute
execute_method.return_value = API_RESPONSE
-result = self.hook.batch_clear(spreadsheet_id=SPREADHSEET_ID, ranges=RANGES)
+result = self.hook.batch_clear(spreadsheet_id=SPREADSHEET_ID, ranges=RANGES)
body = {"ranges": RANGES}
self.assertIs(result, API_RESPONSE)
execute_method.assert_called_once_with(num_retries=NUM_RETRIES)
-batch_clear_method.assert_called_once_with(spreadsheetId=SPREADHSEET_ID, body=body)
+batch_clear_method.assert_called_once_with(spreadsheetId=SPREADSHEET_ID, body=body)

@mock.patch("airflow.providers.google.suite.hooks.sheets.GSheetsHook.get_conn")
def test_get_spreadsheet(self, mock_get_conn):
get_mock = mock_get_conn.return_value.spreadsheets.return_value.get
get_mock.return_value.execute.return_value = API_RESPONSE

-result = self.hook.get_spreadsheet(spreadsheet_id=SPREADHSEET_ID)
+result = self.hook.get_spreadsheet(spreadsheet_id=SPREADSHEET_ID)

-get_mock.assert_called_once_with(spreadsheetId=SPREADHSEET_ID)
+get_mock.assert_called_once_with(spreadsheetId=SPREADSHEET_ID)
assert result == API_RESPONSE

@mock.patch("airflow.providers.google.suite.hooks.sheets.GSheetsHook.get_spreadsheet")
@@ -251,11 +251,11 @@ def test_get_sheet_titles(self, mock_get_spreadsheet):
sheet2 = {"properties": {"title": "title2"}}
mock_get_spreadsheet.return_value = {"sheets": [sheet1, sheet2]}

-result = self.hook.get_sheet_titles(spreadsheet_id=SPREADHSEET_ID)
-mock_get_spreadsheet.assert_called_once_with(spreadsheet_id=SPREADHSEET_ID)
+result = self.hook.get_sheet_titles(spreadsheet_id=SPREADSHEET_ID)
+mock_get_spreadsheet.assert_called_once_with(spreadsheet_id=SPREADSHEET_ID)
assert result == ["title1", "title2"]

-result = self.hook.get_sheet_titles(spreadsheet_id=SPREADHSEET_ID, sheet_filter=["title1"])
+result = self.hook.get_sheet_titles(spreadsheet_id=SPREADSHEET_ID, sheet_filter=["title1"])
assert result == ["title1"]

@mock.patch("airflow.providers.google.suite.hooks.sheets.GSheetsHook.get_conn")