Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion beam/util/build-beam-artifacts.sh
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ readonly BEAM_SOURCE_VERSION="${3:-master}"

function build_job_service() {
./gradlew :beam-runners-flink_2.11-job-server:shadowJar
gsutil cp \
gcloud storage cp \
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

security-medium medium

The gcloud storage cp command uses the unquoted variables ${BEAM_JOB_SERVICE_DESTINATION} and ${BEAM_SOURCE_VERSION} on line 18. Unquoted expansions undergo word splitting and glob expansion, and a value beginning with a hyphen could be interpreted as a command option (argument injection). Although the change itself is on line 16, it is part of the same command. It is recommended to double-quote these variables and pass -- to signal the end of options.

Suggested change
gcloud storage cp \
gcloud storage cp -- \
./runners/flink/job-server/build/libs/beam-runners-flink_2.11-job-server-*-SNAPSHOT.jar \
"${BEAM_JOB_SERVICE_DESTINATION}/beam-runners-flink_2.11-job-server-${BEAM_SOURCE_VERSION}-SNAPSHOT.jar"

./runners/flink/job-server/build/libs/beam-runners-flink_2.11-job-server-*-SNAPSHOT.jar \
${BEAM_JOB_SERVICE_DESTINATION}/beam-runners-flink_2.11-job-server-${BEAM_SOURCE_VERSION}-SNAPSHOT.jar
}
Expand Down
2 changes: 1 addition & 1 deletion cloudbuild/run-presubmit-on-k8s.sh
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ if [[ ${exit_code} != 0 ]]; then
LOG_GCS_PATH="gs://${BUCKET}/${BUILD_ID}/logs/${POD_NAME}.log"

echo "Attempting to upload logs to ${LOG_GCS_PATH}"
if kubectl logs "${POD_NAME}" | gsutil cp - "${LOG_GCS_PATH}"; then
if kubectl logs "${POD_NAME}" | gcloud storage cp - "${LOG_GCS_PATH}"; then
echo "Logs for failed pod ${POD_NAME} uploaded to: ${LOG_GCS_PATH}"
else
echo "Log upload to ${LOG_GCS_PATH} failed."
Expand Down
2 changes: 1 addition & 1 deletion datasketches/datasketches.sh
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ function download_libraries()
function download_example_jar()
{
if [[ "${SPARK_VERSION}" < "3.5" ]]; then
gsutil cp "${SPARK_JAVA_EXAMPLE_JAR}" "${DS_LIBPATH}"
gcloud storage cp "${SPARK_JAVA_EXAMPLE_JAR}" "${DS_LIBPATH}"
if [ $? -eq 0 ]; then
echo "Downloaded "${SPARK_JAVA_EXAMPLE_JAR}" successfully"
else
Expand Down
2 changes: 1 addition & 1 deletion gobblin/gobblin.sh
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,7 @@ EOF
function install_package() {
# Download binary.
local temp=$(mktemp -d)
gsutil cp "${PACKAGE_URL}" "${temp}/package.tar.gz"
gcloud storage cp "${PACKAGE_URL}" "${temp}/package.tar.gz"
tar -xf "${temp}/package.tar.gz" -C "${temp}"

# Setup package.
Expand Down
2 changes: 1 addition & 1 deletion gpu/manual-test-runner.sh
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ function exit_handler() {
# TODO: remove any test related resources in the project

echo 'Uploading local logs to GCS bucket.'
gsutil -m rsync -r "${log_dir}/" "${gcs_log_dir}/"
gcloud storage rsync --recursive "${log_dir}/" "${gcs_log_dir}/"

if [[ -f "${tmp_dir}/tests_success" ]]; then
echo -e "${GREEN}Workflow succeeded${NC}, check logs at ${log_dir}/ or ${gcs_log_dir}/"
Expand Down
4 changes: 2 additions & 2 deletions hbase/test_hbase.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,11 +15,11 @@ def setUp(self):
super().setUp()
self.GCS_BUCKET = "test-hbase-{}-{}".format(self.datetime_str(),
self.random_str())
self.assert_command('gsutil mb -c regional -l {} gs://{}'.format(
self.assert_command('gcloud storage buckets create --default-storage-class=regional --location {} gs://{}'.format(
self.REGION, self.GCS_BUCKET))
Comment on lines +18 to 19
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

medium

This command string is getting long. To improve readability, you could break it into multiple lines and assign it to a variable before passing it to self.assert_command.

Suggested change
self.assert_command('gcloud storage buckets create --default-storage-class=regional --location {} gs://{}'.format(
self.REGION, self.GCS_BUCKET))
create_bucket_cmd = (
'gcloud storage buckets create '
'--default-storage-class=regional '
'--location {} gs://{}'
).format(self.REGION, self.GCS_BUCKET)
self.assert_command(create_bucket_cmd)


def tearDown(self):
self.assert_command('gsutil -m rm -rf gs://{}'.format(self.GCS_BUCKET))
self.assert_command('gcloud storage rm --recursive --continue-on-error gs://{}'.format(self.GCS_BUCKET))
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

medium

This command string is a bit long. For readability, consider assigning it to a well-named variable and breaking it across multiple lines before passing it to self.assert_command.

        rm_bucket_cmd = (
            'gcloud storage rm --recursive --continue-on-error gs://{}'
        ).format(self.GCS_BUCKET)
        self.assert_command(rm_bucket_cmd)

super().tearDown()

def verify_instance(self, name):
Expand Down
2 changes: 1 addition & 1 deletion hive-lineage/hive-lineage.sh
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ function set_hive_lineage_conf() {

function install_jars() {
echo "Installing openlineage-hive hook"
gsutil cp -P "$INSTALLATION_SOURCE/hive-openlineage-hook-$HIVE_OL_HOOK_VERSION.jar" "$HIVE_LIB_DIR/hive-openlineage-hook.jar"
gcloud storage cp --preserve-posix "$INSTALLATION_SOURCE/hive-openlineage-hook-$HIVE_OL_HOOK_VERSION.jar" "$HIVE_LIB_DIR/hive-openlineage-hook.jar"
}

function restart_hive_server2_master() {
Expand Down
2 changes: 1 addition & 1 deletion knox/verify_knox.sh
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ function test_installation() {
# to test update, we will upload a new topology to gs bucket, and check whether it appears
# we assume that knox initialization action is the very first one, /etc/google-dataproc/startup-scripts/dataproc-initialization-script-0
function test_update_new_topology() {
gsutil cp /etc/knox/conf/topologies/example-hive-pii.xml "${KNOX_GW_CONFIG_GCS}/topologies/update_topology.xml"
gcloud storage cp /etc/knox/conf/topologies/example-hive-pii.xml "${KNOX_GW_CONFIG_GCS}/topologies/update_topology.xml"
sudo /bin/bash /etc/google-dataproc/startup-scripts/dataproc-initialization-script-0 update
test_installation update_topology
[[ $? == 1 ]] && return 1
Expand Down
2 changes: 1 addition & 1 deletion push-to-gcs.sh
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,6 @@ for file in "${MODULE}/"*.sh; do
fi
done

gsutil -m rsync -R -x "__pycache__/.*" "${MODULE}/" "${GCS_FOLDER}"
gcloud storage rsync --recursive --exclude "__pycache__/.*" "${MODULE}/" "${GCS_FOLDER}"

echo "Pushed ${MODULE}/ to ${GCS_FOLDER}."
3 changes: 1 addition & 2 deletions rapids/manual-test-runner.sh
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ function exit_handler() {
# TODO: remove any test related resources in the project

echo 'Uploading local logs to GCS bucket.'
gsutil -m rsync -r "${log_dir}/" "${gcs_log_dir}/"
gcloud storage rsync --recursive "${log_dir}/" "${gcs_log_dir}/"

if [[ -f "${tmp_dir}/tests_success" ]]; then
echo -e "${GREEN}Workflow succeeded, check logs at ${log_dir}/ or ${gcs_log_dir}/${NC}"
Expand All @@ -74,4 +74,3 @@ export INTERNAL_IP_SSH="true"
screen -US "${session_name}" -c rapids/bazel.screenrc