CLI & SDK
CLI & SDK¶
1. CLI 개요¶
1.1 AWS CLI vs gcloud CLI¶
| 항목 | AWS CLI | gcloud CLI |
|---|---|---|
| 설치 패키지 | awscli | google-cloud-sdk |
| 구성 명령 | aws configure | gcloud init |
| 프로필 | --profile | --configuration |
| 출력 형식 | json, text, table, yaml | json, text, yaml, csv |
2. AWS CLI¶
2.1 설치¶
# macOS
curl "https://awscli.amazonaws.com/AWSCLIV2.pkg" -o "AWSCLIV2.pkg"
sudo installer -pkg AWSCLIV2.pkg -target /
# Linux
curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip"
unzip awscliv2.zip
sudo ./aws/install
# pip
pip install awscli
# Verify the installed version
aws --version
2.2 구성¶
# Basic interactive configuration
aws configure
# AWS Access Key ID: AKIAIOSFODNN7EXAMPLE
# AWS Secret Access Key: wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY
# Default region name: ap-northeast-2
# Default output format: json
# Add named profiles
aws configure --profile production
aws configure --profile development
# List configured profiles
aws configure list-profiles
# Configure via environment variables
export AWS_ACCESS_KEY_ID=AKIAIOSFODNN7EXAMPLE
export AWS_SECRET_ACCESS_KEY=wJalrXUtnFEMI...
export AWS_DEFAULT_REGION=ap-northeast-2
export AWS_PROFILE=production
~/.aws/credentials:
[default]
aws_access_key_id = AKIAIOSFODNN7EXAMPLE
aws_secret_access_key = wJalrXUtnFEMI...
[production]
aws_access_key_id = AKIAI44QH8DHBEXAMPLE
aws_secret_access_key = je7MtGbClwBF/2Zp9Utk...
~/.aws/config:
[default]
region = ap-northeast-2
output = json
[profile production]
region = ap-northeast-1
output = table
2.3 주요 명령어¶
# EC2 — instance lifecycle
aws ec2 describe-instances
aws ec2 run-instances --image-id ami-xxx --instance-type t3.micro
aws ec2 start-instances --instance-ids i-xxx
aws ec2 stop-instances --instance-ids i-xxx
aws ec2 terminate-instances --instance-ids i-xxx
# S3 — object storage operations
aws s3 ls
aws s3 cp file.txt s3://bucket/
aws s3 sync ./folder s3://bucket/folder
aws s3 rm s3://bucket/file.txt
# IAM — users and policy attachments
aws iam list-users
aws iam create-user --user-name john
aws iam attach-user-policy --user-name john --policy-arn arn:aws:iam::aws:policy/ReadOnlyAccess
# Lambda — list and invoke functions
aws lambda list-functions
aws lambda invoke --function-name my-func output.json
# RDS — managed databases
aws rds describe-db-instances
aws rds create-db-snapshot --db-instance-identifier mydb --db-snapshot-identifier mysnap
2.4 출력 필터링 (--query)¶
# Use a JMESPath query to project selected fields
aws ec2 describe-instances \
--query 'Reservations[*].Instances[*].[InstanceId,State.Name,PublicIpAddress]' \
--output table
# Filter server-side by a specific tag
aws ec2 describe-instances \
--filters "Name=tag:Environment,Values=Production" \
--query 'Reservations[*].Instances[*].InstanceId' \
--output text
# Sort by launch time
aws ec2 describe-instances \
--query 'sort_by(Reservations[].Instances[], &LaunchTime)[*].[InstanceId,LaunchTime]'
# Conditional filter inside the query
aws ec2 describe-instances \
--query 'Reservations[].Instances[?State.Name==`running`].InstanceId'
3. gcloud CLI¶
3.1 설치¶
# macOS
brew install --cask google-cloud-sdk
# Or install directly via the installer script
curl https://sdk.cloud.google.com | bash
exec -l $SHELL
# Linux
curl -O https://dl.google.com/dl/cloudsdk/channels/rapid/downloads/google-cloud-cli-linux-x86_64.tar.gz
tar -xf google-cloud-cli-linux-x86_64.tar.gz
./google-cloud-sdk/install.sh
# Verify the installed version
gcloud --version
3.2 구성¶
# Initialize (opens browser-based authentication)
gcloud init
# Log in
gcloud auth login
# Authenticate with a service account key
gcloud auth activate-service-account --key-file=key.json
# Set the active project
gcloud config set project PROJECT_ID
# Set default region/zone
gcloud config set compute/region asia-northeast3
gcloud config set compute/zone asia-northeast3-a
# Show the current configuration
gcloud config list
# Named configuration profiles
gcloud config configurations create production
gcloud config configurations activate production
gcloud config configurations list
3.3 주요 명령어¶
# Compute Engine — VM lifecycle and SSH access
gcloud compute instances list
gcloud compute instances create my-vm --machine-type=e2-medium
gcloud compute instances start my-vm
gcloud compute instances stop my-vm
gcloud compute instances delete my-vm
gcloud compute ssh my-vm
# Cloud Storage — object operations via gsutil
gsutil ls
gsutil cp file.txt gs://bucket/
gsutil rsync -r ./folder gs://bucket/folder
gsutil rm gs://bucket/file.txt
# IAM — service accounts and role bindings
gcloud iam service-accounts list
gcloud iam service-accounts create my-sa
gcloud projects add-iam-policy-binding PROJECT --member=user:john@example.com --role=roles/viewer
# Cloud Functions — list and deploy
gcloud functions list
gcloud functions deploy my-func --runtime=python312 --trigger-http
# Cloud SQL — managed database instances
gcloud sql instances list
gcloud sql instances create mydb --database-version=MYSQL_8_0
3.4 출력 필터링 (--filter, --format)¶
# Server-side filtering
gcloud compute instances list \
--filter="status=RUNNING AND zone:asia-northeast3"
# Custom table output
gcloud compute instances list \
--format="table(name,zone.basename(),status,networkInterfaces[0].accessConfigs[0].natIP)"
# JSON output
gcloud compute instances describe my-vm --format=json
# Selected fields only (script-friendly)
gcloud compute instances list \
--format="value(name,networkInterfaces[0].accessConfigs[0].natIP)"
# CSV
gcloud compute instances list \
--format="csv(name,zone,status)"
4. Python SDK¶
4.1 AWS SDK (boto3)¶
설치:
pip install boto3
기본 사용:
import boto3

# Client interface: low-level, maps 1:1 onto AWS API operations.
ec2_client = boto3.client('ec2')
response = ec2_client.describe_instances()

# Resource interface: high-level, object-oriented wrapper.
ec2 = boto3.resource('ec2')
instances = ec2.instances.filter(
    Filters=[{'Name': 'instance-state-name', 'Values': ['running']}]
)
for instance in instances:
    print(instance.id, instance.public_ip_address)
서비스별 예시:
import boto3
import json  # required for json.dumps in the Lambda invoke payload below

# S3: upload/download a local file.
s3 = boto3.client('s3')
s3.upload_file('file.txt', 'my-bucket', 'file.txt')
s3.download_file('my-bucket', 'file.txt', 'downloaded.txt')

# DynamoDB: put and get a single item by primary key.
dynamodb = boto3.resource('dynamodb')
table = dynamodb.Table('Users')
table.put_item(Item={'userId': '001', 'name': 'John'})
response = table.get_item(Key={'userId': '001'})

# Lambda: synchronous invoke with a JSON payload.
lambda_client = boto3.client('lambda')
response = lambda_client.invoke(
    FunctionName='my-function',
    Payload=json.dumps({'key': 'value'})
)

# SQS: send a message to a queue by URL.
sqs = boto3.client('sqs')
sqs.send_message(
    QueueUrl='https://sqs.../my-queue',
    MessageBody='Hello'
)

# Secrets Manager: fetch a secret value by ID.
secrets = boto3.client('secretsmanager')
secret = secrets.get_secret_value(SecretId='my-secret')
4.2 GCP SDK (google-cloud)¶
설치:
pip install google-cloud-storage
pip install google-cloud-compute
pip install google-cloud-firestore
# Install one client library per GCP service as needed
기본 사용:
from google.cloud import storage
from google.cloud import compute_v1

# Authentication (service account):
# export GOOGLE_APPLICATION_CREDENTIALS="key.json"

# Cloud Storage: upload/download a blob.
client = storage.Client()
bucket = client.bucket('my-bucket')
blob = bucket.blob('file.txt')
blob.upload_from_filename('file.txt')
blob.download_to_filename('downloaded.txt')

# Compute Engine: list instances in a single zone.
instance_client = compute_v1.InstancesClient()
instances = instance_client.list(project='my-project', zone='asia-northeast3-a')
for instance in instances:
    print(instance.name, instance.status)
서비스별 예시:
# Firestore: document set/get.
from google.cloud import firestore
db = firestore.Client()
doc_ref = db.collection('users').document('001')
doc_ref.set({'name': 'John', 'age': 30})
doc = doc_ref.get()
print(doc.to_dict())

# Pub/Sub: publish a message (payload must be bytes).
from google.cloud import pubsub_v1
publisher = pubsub_v1.PublisherClient()
topic_path = publisher.topic_path('project', 'topic')
publisher.publish(topic_path, b'Hello')

# Secret Manager: read the latest version of a secret.
from google.cloud import secretmanager
client = secretmanager.SecretManagerServiceClient()
name = "projects/PROJECT/secrets/my-secret/versions/latest"
response = client.access_secret_version(request={"name": name})
secret = response.payload.data.decode("UTF-8")

# BigQuery: run a query and iterate the result rows.
from google.cloud import bigquery
client = bigquery.Client()
query = "SELECT * FROM `project.dataset.table` LIMIT 10"
results = client.query(query)
for row in results:
    print(row)
5. 자동화 스크립트 예시¶
5.1 리소스 정리 스크립트¶
AWS - 미사용 EBS 볼륨 삭제:
import boto3

ec2 = boto3.client('ec2')

# Find detached volumes: status "available" means not attached to any instance.
volumes = ec2.describe_volumes(
    Filters=[{'Name': 'status', 'Values': ['available']}]
)
for vol in volumes['Volumes']:
    vol_id = vol['VolumeId']
    print(f"Deleting unused volume: {vol_id}")
    # ec2.delete_volume(VolumeId=vol_id)  # uncomment to actually delete
GCP - 오래된 스냅샷 삭제:
from google.cloud import compute_v1
from datetime import datetime, timedelta

client = compute_v1.SnapshotsClient()
project = 'my-project'
snapshots = client.list(project=project)
cutoff = datetime.now() - timedelta(days=30)
for snapshot in snapshots:
    # creation_timestamp is RFC 3339; normalize a trailing 'Z' so
    # datetime.fromisoformat accepts it on older Python versions.
    created = datetime.fromisoformat(snapshot.creation_timestamp.replace('Z', '+00:00'))
    # Attach created's tzinfo to the naive cutoff so the comparison is valid.
    if created < cutoff.replace(tzinfo=created.tzinfo):
        print(f"Deleting old snapshot: {snapshot.name}")
        # client.delete(project=project, snapshot=snapshot.name)  # uncomment to delete
5.2 배포 스크립트¶
AWS Lambda 배포:
import boto3
import zipfile
import os

def deploy_lambda(function_name, source_dir):
    """Zip the contents of source_dir and upload it as the Lambda function's code.

    function_name: name (or ARN) of an existing Lambda function.
    source_dir: directory whose files become the root of the deployment package.
    """
    # Zip the source tree. Store entries relative to source_dir; writing the
    # full os.walk path would embed the source_dir prefix in the archive and
    # Lambda could not resolve the handler module at the package root.
    with zipfile.ZipFile('function.zip', 'w') as zf:
        for root, dirs, files in os.walk(source_dir):
            for file in files:
                full_path = os.path.join(root, file)
                zf.write(full_path, os.path.relpath(full_path, source_dir))
    # Upload the archive as the new function code.
    lambda_client = boto3.client('lambda')
    with open('function.zip', 'rb') as f:
        lambda_client.update_function_code(
            FunctionName=function_name,
            ZipFile=f.read()
        )
    print(f"Deployed {function_name}")

deploy_lambda('my-function', './src')
5.3 모니터링 스크립트¶
AWS 인스턴스 상태 확인:
#!/bin/bash
# check-instances.sh — warn (and notify via SNS) when a Production-tagged
# EC2 instance is in any state other than "running".
set -euo pipefail

instances=$(aws ec2 describe-instances \
  --filters "Name=tag:Environment,Values=Production" \
  --query 'Reservations[].Instances[].[InstanceId,State.Name]' \
  --output text)

while read -r instance_id state; do
  # Skip blank lines (e.g. when no instances matched the filter).
  [[ -n "$instance_id" ]] || continue
  if [[ "$state" != "running" ]]; then
    echo "WARNING: $instance_id is $state"
    # Send an alert
    aws sns publish \
      --topic-arn arn:aws:sns:...:alerts \
      --message "Instance $instance_id is $state"
  fi
done <<< "$instances"
6. 페이지네이션 처리¶
6.1 AWS CLI 페이지네이션¶
# Automatic pagination (the CLI follows continuation tokens for you)
aws s3api list-objects-v2 --bucket my-bucket
# Manual pagination
aws s3api list-objects-v2 --bucket my-bucket --max-items 100
# Next page
aws s3api list-objects-v2 --bucket my-bucket --starting-token TOKEN
boto3 Paginator:
import boto3

s3 = boto3.client('s3')
# A paginator transparently follows ContinuationToken across pages.
paginator = s3.get_paginator('list_objects_v2')
for page in paginator.paginate(Bucket='my-bucket'):
    # 'Contents' is absent on an empty result page, hence the .get default.
    for obj in page.get('Contents', []):
        print(obj['Key'])
6.2 gcloud 페이지네이션¶
# Automatic pagination
gcloud compute instances list
# Manual pagination
gcloud compute instances list --limit=100 --page-token=TOKEN
Python:
from google.cloud import storage

client = storage.Client()
blobs = client.list_blobs('my-bucket')  # iterator fetches further pages automatically
for blob in blobs:
    print(blob.name)
7. 에러 처리¶
7.1 boto3 에러 처리¶
import boto3
from botocore.exceptions import ClientError, NoCredentialsError

try:
    s3 = boto3.client('s3')
    s3.head_object(Bucket='my-bucket', Key='file.txt')
except NoCredentialsError:
    print("Credentials not found")
except ClientError as e:
    # head_object surfaces HTTP status codes as the error code string.
    error_code = e.response['Error']['Code']
    if error_code == '404':
        print("Object not found")
    elif error_code == '403':
        print("Access denied")
    else:
        raise  # unexpected error: propagate to the caller
7.2 GCP 에러 처리¶
from google.cloud import storage
from google.api_core.exceptions import NotFound, Forbidden

try:
    client = storage.Client()
    bucket = client.get_bucket('my-bucket')
    blob = bucket.blob('file.txt')
    blob.download_to_filename('file.txt')
except NotFound:
    print("Bucket or object not found")
except Forbidden:
    print("Access denied")
8. 다음 단계¶
- 16_Infrastructure_as_Code.md - Terraform
- 17_Monitoring_Logging_Cost.md - 모니터링