from django.core.management.base import BaseCommand, CommandError
from crocolinks.models import CrocoLink
from datetime import datetime
import os
import shutil
import boto3
import logging
from botocore.config import Config
import requests
from botocore.exceptions import ClientError, NoCredentialsError
import time
from twisted.internet import task, reactor
##mysqlimport
#import mysql.connector
from pathlib import Path
from os import path
###AWS INFO####
# print(list_objects_bucket)
class Command(BaseCommand):
help = 'Linkebis aploadi'
def handle(self,*args,**kwargs):
access_key = 'XXXXXXXXXXXXXXXXXXXXXXXX'
access_secret = 'XXXXXXXXXXXXXXXXXXXXXXXX'
bucket_name = 'XXXXXXXXXXXXXXXXXXXXXXXX'
bucket_name2= 'XXXXXXXXXXXXXXXXXXXXXXXX'
client = boto3.client('s3')
list_objects_bucket = client.list_objects(Bucket=bucket_name)
# mydb = mysql.connector.connect(host="localhost", user="newuser",database="cointrack",passwd="password")
# mycursor = mydb.cursor()
####Connet To S3 Service
client_s3= boto3.client(
's3',
region_name="eu-west-2",
aws_access_key_id=access_key,
aws_secret_access_key=access_secret
)
counter = 0
s3_resource = boto3.resource("s3", region_name="eu-west-2")
#upload files to S3 Bucker
data_file_folder = r"//10.0.83.27/Shared/123"
t1 = time.strftime('%Y-%m-%d %H:%M:%S')
try:
#bucket_name = "S3_Bucket_Name" #s3 bucket name
data_file_folder = r"//10.0.83.27/Shared/123/" # local folder for upload
my_bucket = s3_resource.Bucket(bucket_name)
my_bucket2= s3_resource.Bucket(bucket_name2)
for path, subdirs, files in os.walk(data_file_folder):
path = path.replace("\\","/")
directory_name = path.replace(data_file_folder,"")
Destination_dir= "//10.0.83.27/Shared/gadatanilebi/"
Dest_dir_xelmeored="//10.0.83.277/Shared/Xelmeoredatvirtulebi/"
for file in files:
if os.path.isfile(Destination_dir file)==False:
now = datetime.now()
my_bucket2.upload_file(os.path.join(path, file),file)
t1 = time.strftime('%Y-%m-%d %H:%M:%S')
print('Uploading file {0}...'.format(file))
print(path)
print(t1)
counter =1
#shutil.move(path "/" file, Destination_dir)
print(file)
shutil.move((path "/" file), os.path.join(Destination_dir,file))
else:
if os.path.isfile(Destination_dir file)==True: #### Tu ukve ertxel gadatanili iqneb sxva foldershi gadaitans ro ar gadaawero
now = datetime.now()
my_bucket.upload_file(os.path.join(path, file),file)#directory_name '/' file) ###bucketze Uploadi
my_bucket2.upload_file(os.path.join(path, file),file)
t1 = time.strftime('%Y-%m-%d %H:%M:%S')
print('Uploading file {0}...'.format(file))
print(path)
print(t1)
#shutil.move(path "/" file, Destination_dir)
print(file)
counter =1
shutil.move((path "/" file), os.path.join(Dest_dir_xelmeored,file))
print(counter)
#shutil.copytree(path "/" file, Destination_dir, file_exist_ok=True)
# os.rename(file,Destination_dir)
When i try to run into my local computer work correctly , but when i move into Windows server 2021
THere is error code
File "C:\Users\loc\AppData\Local\Programs\Python\Python39\lib\site-packages\botocore\auth.py", line 357, in add_auth
raise NoCredentialsError
botocore.exceptions.NoCredentialsError: Unable to locate credentials
What is problem ? Credentials is 100% correct , because as I told it's work in my local computer. I mean , when i run in my Computer , Windows 10 it's works correctly , when i run another computer , Windows 2012 server not works
CodePudding user response:
Though you provided credentials in code, you are not using it anywhere.
It works in your local machine because, you might have AWS CLI installed and configured credentials and the code would have used that configured creds
The below code will use inline creds in code, however would advice you to use creds set using EC2 instance profile deployed to AWS or use configured creds with CLI
import boto3
session = boto3.Session(
aws_access_key_id=access_key,
aws_secret_access_key=access_secret
)
s3_resource = session.resource("s3", region_name="eu-west-2")