Although I've been happily running this script for best part of a year, i recently upgraded to Catalina OSX and reinstalled Exchangelib. Now I get an error with item_id:
'Message' object has no attribute 'item_id'
Here's my code, I would love to know what I'm doing wrong please 🙏 TIA ps-forgive any convoluted coding...
from exchangelib import DELEGATE, Account, Credentials, Message, \
EWSDateTime, EWSTimeZone, Configuration
from exchangelib.util import PrettyXmlHandler
import logging
logging.basicConfig(level=logging.DEBUG, handlers=[PrettyXmlHandler()])
from datetime import datetime, timedelta
import monthdelta as md
import sqlite3
import pandas as pd
import pm_ews_module as pem
__DBPATH__ = "/Users/patrickstacey/CODE/JUMPY_CODE/dev/data/test_data_tbase11_002"
__CONFIGFILE__ = '/Users/patrickstacey/CODE/JUMPY_CODE/dev/config/jumpyConfig.csv'
__OUTLOOK_EMAIL__ = 'bspks#lunet.lboro.ac.uk'
_PRIMARY_SMTP_ADDRESS_ = 'bspks#lunet.lboro.ac.uk'
__OUTLOOK_PASSWORD__ = '****'
def connect_to_EWS(__OUTLOOK_EMAIL__, __OUTLOOK_PASSWORD__, _PRIMARY_SMTP_ADDRESS_):
creds = Credentials(__OUTLOOK_EMAIL__,__OUTLOOK_PASSWORD__)
config = Configuration(server='outlook.office365.com/EWS/Exchange.asmx', \
credentials=creds)
return Account(
primary_smtp_address=_PRIMARY_SMTP_ADDRESS_,
autodiscover=False,
config = config,
access_type=DELEGATE
)
last_analysis = pem.determine_start_date_required(__OUTLOOK_EMAIL__)
if last_analysis == "no records":
df = pd.read_csv(__CONFIGFILE__)
retrodays = df['detail'].where(df['item'] == "demo_user_days_retro").dropna().values
retrodays = int(retrodays)
last_analysis = None
last_analysis = datetime.today() - timedelta(days=retrodays)
(year,month,day,hour,mins,secs) = pem.unpackDateElements(str(last_analysis))
tz = EWSTimeZone.timezone('Europe/London')
last_analysis = tz.localize(EWSDateTime(year, month, day, hour, mins, secs))
account = connect_to_EWS(__OUTLOOK_EMAIL__, __OUTLOOK_PASSWORD__, __OUTLOOK_EMAIL__)
for item in account.inbox.filter(datetime_received__gt=last_analysis):
if type(item) == Message:
try:
db = sqlite3.connect(__DBPATH__)
cursor = db.cursor()
cursor.execute("INSERT INTO escores_log(email, datetime, subject, body, emailtype, pos_threshold, item_id, status, sender) VALUES(?,?,?,?,?,?,?,?,?)", (__OUTLOOK_EMAIL__, str(item.datetime_received), pem.deEmojify(item.subject), item.text_body, "received", 0.5, item.item_id, 0, item.sender.email_address))
print("Inserted an email from ",item.sender.email_address," about ",item.subject," on ",str(item.datetime_received))
db.commit()
db.close()
except Exception as e:
print ("Exception found: "+str(e))
pass
item_id was renamed to id in version 1.12.0, and finally deprecated in 2.0.0. See notes in the CHANGELOG: https://github.com/ecederstrand/exchangelib/blob/master/CHANGELOG.md#200
looks as though item_id is now called id. I took an educated guess. so the script works again. look fwd to hearing any other views on this. with thanks.
Related
I am getting this error when trying to send and SNS email via lambda function:
"errorMessage": "Connect timeout on endpoint URL: \"https://sns.us-west-1.amazonaws.com/\"",
"errorType": "ConnectTimeoutError"
I have all the policies set up with SNS full access to the respective role tied to function. Here is the full function:
import json
import psycopg2
import boto3
import time
import requests
import pandas as pd
import numpy as np
from datetime import datetime
import sys
import logging
import os
import csv
import smtplib
from base64 import b64decode
#bucket = 's3://data-lake-020192/'
credential = {
'dbname' : 'main',
'host_url' : 'test.us-west-1.redshift.amazonaws.com',
'port' : '5439',
'user' : '####',
'password' : '########'
}
redshift_role = {
'dev': 'arn:aws:lambda:us-west-1:##########:function:test_function'
}
def lambda_handler(event, context):
## S3 CONNECTIVITY ##
s3 = boto3.resource('s3')
#client = boto3.client('s3')
# TODO implement
conn_string = "dbname='{}' port='{}' user='{}' password='{}' host='{}'"\
.format(credential['dbname'], credential['port'], credential['user'], credential['password'], credential['host_url'])
sql_query = """with
tbl as (
select
case
when (sa.parentid like '001i0000023STBY%' or sa.ultimate_parent_account__c like '001i0000023STBY%') --Parent OR Ultimate Parent is <Department of Defense>
then sa.id
else
coalesce(sa.ultimate_parent_account__c, sa.parentid, sa.id) end as cust_id,
(select name from salesforce.account where id=cust_id) as cust_name,
sa.name as acct_name,
sa.id as acct_id,
sa.parentid,
(select name from salesforce.account where id=sa.parentid) as par_name,
(select name from salesforce.account where id=sa.ultimate_parent_account__c) as ult_par_name,
so.id as opp_id,
so.name as opp_name,
so.stagename as stg_name,
so.type as opp_type,
so.Manager_Commit__c as mgr_commit,
so.renewal_risk__c as opp_risk,
so.isclosed as cls
salesforce.opportunity so
join
salesforce.account sa on
so.accountid = sa.id
join salesforce.user su on
so.ownerid = su.id
join salesforce.opportunitylineitem sol on
so.id = sol.opportunityid
join salesforce.product2 sp on
sol.product2id = sp.id
join salesforce.customasset__c sca on
so.id = sca.opportunity__c
where
so.isdeleted = false
and sa.isdeleted = false
and sol.isdeleted = false
)
select * from
(select
tbl.acct_name as acct,
'[' || 'Link' || '](' || concat('https://vectranetworks.lightning.force.com/', tbl.opp_id) || ')' as opp_link,
tbl.ca_name,
tbl.ca_pr_name,
tbl.ca_mode,
date(tbl.ca_last_seen) as ca_last_seen,
tbl.ca_sw_version,
tbl.ca_tot_hosts,
tbl.ca_active_hosts,
tbl.ca_x95_hosts_tot,
tbl.ca_traffic,
tbl.ca_uiconfig
from
tbl
where
tbl.stg_name like 'Closed Won%'
and tbl.arr is not null
group by
tbl.acct_name,
tbl.opp_id,
tbl.ca_name,
tbl.ca_pr_name,
tbl.ca_mode,
tbl.ca_last_seen,
tbl.ca_sw_version,
tbl.ca_tot_hosts,
tbl.ca_active_hosts,
tbl.ca_x95_hosts_tot,
tbl.ca_traffic,
tbl.ca_uiconfig) df
WHERE ca_last_seen >= DATEADD(MONTH, -3, GETDATE())
limit 5"""
con = psycopg2.connect(conn_string)
client2 = boto3.client('sns')
with con.cursor() as cur:
# Enter the query that you want to execute
cur.execute(sql_query)
for row in cur:
df = pd.DataFrame.from_records(cur.fetchall(), columns = [desc[0] for desc in cur.description])
df['Time_Stamp'] = pd.to_datetime('now', utc=True)
df['ca_active_hosts'] = df['ca_active_hosts'].astype('Int64', errors='ignore')
df['ca_active_hosts'].fillna(0, inplace=True)
#print(df.iloc[0])
#if (df.iloc[0]['ca_active_hosts'].notna()):
if (df['ca_active_hosts'] >= 0).all():
print('the file is present, going to send notifaction')
response = client2.publish(
TopicArn = 'arn:aws:sns:us-west-1:##########:email-data-lake',
Message = 'Warning User active_hosts is ' +str(df['Time_Stamp']),
Subject = 'User Warning')
else:
print('the file is not present')
#cur.close()
Is there anything else in code/connection I need to change? Feel I have exhausted all that I can find online being new to SNS
I imagine that your lambda function does not have any internet connectivity.
Thus, a connection timeout issue indicates that the network interface associated with your lambda function is unable to talk to the service.
To fix this, create a VPC interface endpoint for sns.us-west-1.amazonaws.com in the same subnet as that of the lambda's network interface.
I am pretty new to coding and aws chalice. I tried writing a code that gets messages from trading-view and executes orders depending on the signals.
I tested the code locally and everything worked fine, but when I test the Rest API I get the following error:
{"message":"Missing Authentication Token"}
I set up my credentials via "aws configure" as explained here: https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-files.html
I also created a config.txt file in my aws folder and checked my settings via "aws configure get" and they were fine.
The index function in the beginning worked too, so there should be a problem within my code?
I changed some values and cut some functions and the strategy part out, but the code looks somewhat like this:
from chalice import Chalice
from datetime import datetime
from binance.client import Client
from binance.enums import *
import ccxt
exchange = ccxt.binance({
'apiKey': 'KEY',
'secret': 'SECRET',
'enableRateLimit': True,
'options': {
'defaultType': 'future',
},
})
def buy_order(quantity, symbol, order_type = ORDER_TYPE_MARKET,side=SIDE_BUY,recvWindow=5000):
try:
print("sending order")
order = client.futures_create_order(symbol = symbol, type = order_type, side = side, quantity = quantity,recvWindow=recvWindow)
print(order)
except Exception as e:
print("an exception occured - {}".format(e))
return False
return True
app = Chalice(app_name='tradingview-webhook-alert')
indicator1 = "x"
indicator2 = "y"
TRADE_SYMBOL = "Test123"
in_position = False
def diff_time(time1, time2):
fmt = '%Y-%m-%dT%H:%M:%SZ'
tstamp1 = datetime.strptime(time1, fmt)
tstamp2 = datetime.strptime(time2, fmt)
if tstamp1 > tstamp2:
td = tstamp1 - tstamp2
else:
td = tstamp2 - tstamp1
td_mins = int(round(td.total_seconds() / 60))
return td_mins
#app.route('/test123', methods=['POST'])
def test123():
global indicator1, indicator2
request = app.current_request
message = request.json_body
indicator = message["indicator"]
price = message["price"]
value = message["value"]
if indicator == "indicator1":
indicator1 = value
if indicator == "indicator2":
indicator2 = value
if in_position == False:
if (indicator1 >123) & (indicator2 < 321):
balance = exchange.fetch_free_balance()
usd = float(balance['USDT'])
TRADE_QUANTITY = (usd / price)*0.1
order_succeeded = buy_order(TRADE_QUANTITY, TRADE_SYMBOL)
if order_succeeded:
in_position = True
return {"test": "123"}
I tested it locally with Insomnia and tried the Rest API link there and in my browser, both with the same error message. Is my testing method wrong or is it the code? But even then, why isn't the Rest API link working, when I include the index function from the beginning again? If I try the index function from the beginning, I get the {"message": "Internal server error"} .
This is probably a very very basic question but I couldn't find an answer online.
Any help would be appreciated!
I am not pretty sure if that helps you because I don't really understand your question but:
You are using a POST-request which will not be executed by opening a URL.
Try something like #app.route('/test123', methods=['POST', 'GET']) so that if you just open the URL, it will execute a GET-request
Some more information:
https://www.w3schools.com/tags/ref_httpmethods.asp
I am struggling to convert this captured output as a dictionary so i can distribute the list of users who have a password over 90 days old:
import boto3
import datetime
from dateutil.tz import tzutc
sts_client = boto3.client('sts')
assumed_role_object=sts_client.assume_role(
RoleArn="arn:aws:iam::9999999999:role/role",
RoleSessionName="AssumedRoleSession2"
)
credentials=assumed_role_object['Credentials']
resource=boto3.resource(
'iam',
aws_access_key_id=credentials['AccessKeyId'],
aws_secret_access_key=credentials['SecretAccessKey'],
aws_session_token=credentials['SessionToken'],
)
sns = boto3.client('sns')
import json
import boto3
def lambda_handler(context,event):
today = datetime.datetime.now()
for user in resource.users.all():
if user.password_last_used is not None:
delta = (today - user.password_last_used.replace(tzinfo=None)).days
if delta >= 90:
print(','.join(('Username: ',str([user.user_name]), str(delta))))
sns_message = ','.join(('Username: ',str([user.user_name]), str(delta)))
response = sns.publish(
TopicArn='arn:aws:sns:eu-west-2:1111111111:topic',
Message= sns_message,
Subject='Users who have failed to log in over 90 Days',
)
My current attempt is causing 189 emails containing a single user name and password age, i want an email contenting a list of users and their password ages
Is there anyone kind enough to help me?
Thanks
Nick
Your sns.publish is inside your for loop. Create your list of users inside the loop then call the publish command.
I may get some of the syntax wrong as i can't test, but hopefully you'll the point.
user_list = []
for user in resource.users.all():
if user.password_last_used is not None:
delta = (today - user.password_last_used.replace(tzinfo=None)).days
if delta >= 90:
print(','.join(('Username: ',str([user.user_name]), str(delta))))
user_list.append(','.join(('Username: ',str([user.user_name]), str(delta))))
sns_message = user_list # you may need to check formatting of this prior to sending to sns???? it's a list, not a string
response = sns.publish(
TopicArn='arn:aws:sns:eu-west-2:1111111111:topic',
Message= sns_message,
Subject='Users who have failed to log in over 90 Days',
)
Hi I have a problem with this piece of code that needs to get stock prices based on a defined time period and a ticker code.
The program actually works when I use my IEX API KEY, but not when I use my TEST IEX API KEY, I get the following error message
Unable to read URL: https://cloud.iexapis.com/stable/stock/market/batch?symbols=AAPL&types=chart&range=1y&token=Tpk_157dbb6ac5914bb6b5e309b5eb1484f5
Response Text:
b'Test tokens may only be used in the sandbox environment. Please use https://sandbox.iexapis.com' error
'''
How to download stock data
'''
import pandas as pd
import pandas_datareader.data as web
import datetime as dt
from datetime import datetime
import os
os.environ["IEX_API_KEY"] = "Tpk_157dbb6ac5914bb6b5e309b5eb1484f5"
def get_stock_data():
tickers = ['AAPL'] #capitalize tickers
start = dt.datetime(2019,1,1) # can import 5 years max with iex
end = dt.datetime.today()
if not os.path.exists('stockdata'):
os.makedirs('stockdata')
for ticker in tickers:
print(ticker)
try :
df = web.DataReader(ticker, "iex", start, end)
print(df.head())
df.to_csv('stockdata/{}.to_csv'.format(ticker))
print(ticker, 'downloaded')
except Exception as e:
print(e, 'error')
get_stock_data()
I probably should have told the API that this is the iexcloud-sandbox that I need to access,as described in the Error Message, but the description link dosent say anything about it: https://intercom.help/iexcloud/en/articles/2915433-testing-with-the-iex-cloud-sandbox and I don't know how get it to work, can anybody help?
Set your IEX_API_VERSION environment variable to iexcloud-sandbox:
os.environ['IEX_API_VERSION'] = 'iexcloud-sandbox'
Reference: https://github.com/addisonlynch/iexfinance/blob/7cf902e275f3f84b2892b87ff072fa1808926c15/docs/source/sandbox.rst
The reason why python os.getenv("IEX_SANDBOX")=="enable" works. The following code was retrieved from pdr_DataReader/iex/daily.py/IEXDailyReader itself.
if os.getenv("IEX_SANDBOX") == "enable":
self.sandbox = True
else:
self.sandbox = False
self.api_key = api_key
super(IEXDailyReader, self).__init__(
symbols=symbols,
start=start,
end=end,
retry_count=retry_count,
pause=pause,
session=session,
chunksize=chunksize,
)
#property
def default_start_date(self):
today = datetime.date.today()
return today - datetime.timedelta(days=365 * 15)
#property
def url(self):
"""API URL"""
if self.sandbox is True:
return "https://sandbox.iexapis.com/stable/stock/market/batch"
else:
return "https://cloud.iexapis.com/stable/stock/market/batch"
I was having the same issue as you and this worked for me:
import os
os.environ['IEX_SANDBOX'] = 'enable'
During the DataReader call, there is a check for this environment variable, and if you have set it to 'enable', then it will choose the sandbox URL.
I'm struggling to get a Lambda function working. I have a python script to access twitter API, pull information, and export that information into an excel sheet. I'm trying to transfer python script over to AWS/Lambda, and I'm having a lot of trouble.
What I've done so far: Created AWS account, setup S3 to have a bucket, and poked around trying to get things to work.
I think the main area I'm struggling is how to go from a python script that I'm executing via local CLI and transforming that code into lambda-capable code. I'm not sure I understand how the lambda_handler function works, what the event or context arguments actually mean (despite watching a half dozen different tutorial videos), or how to integrate my existing functions into Lambda in the context of the lambda_handler, and I'm just very confused and hoping someone might be able to help me get some clarity!
Code that I'm using to pull twitter data (just a sample):
import time
import datetime
import keys
import pandas as pd
from twython import Twython, TwythonError
import pymysql
def lambda_handler(event, context):
def oauth_authenticate():
twitter_oauth = Twython(keys.APP_KEY, keys.APP_SECRET, oauth_version=2)
ACCESS_TOKEN = twitter_oauth.obtain_access_token()
twitter = Twython(keys.APP_KEY, access_token = ACCESS_TOKEN)
return twitter
def get_username():
"""
Prompts for the screen name of targetted account
"""
username = input("Enter the Twitter screenname you'd like information on. Do not include '#':")
return username
def get_user_followers(username):
"""
Returns data on all accounts following the targetted user.
WARNING: The number of followers can be huge, and the data isn't very valuable
"""
#username = get_username()
#import pdb; pdb.set_trace()
twitter = oauth_authenticate()
datestamp = str(datetime.datetime.now().strftime("%Y-%m-%d"))
target = twitter.lookup_user(screen_name = username)
for y in target:
target_id = y['id_str']
next_cursor = -1
index = 0
followersdata = {}
while next_cursor:
try:
get_followers = twitter.get_followers_list(screen_name = username,
count = 200,
cursor = next_cursor)
for x in get_followers['users']:
followersdata[index] = {}
followersdata[index]['screen_name'] = x['screen_name']
followersdata[index]['id_str'] = x['id_str']
followersdata[index]['name'] = x['name']
followersdata[index]['description'] = x['description']
followersdata[index]['date_checked'] = datestamp
followersdata[index]['targeted_account_id'] = target_id
index = index + 1
next_cursor = get_followers["next_cursor"]
except TwythonError as e:
print(e)
remainder = (float(twitter.get_lastfunction_header(header = 'x-rate-limit-reset')) \
- time.time())+1
print("Rate limit exceeded. Waiting for:", remainder/60, "minutes")
print("Current Time is:", time.strftime("%I:%M:%S"))
del twitter
time.sleep(remainder)
twitter = oauth_authenticate()
continue
followersDF = pd.DataFrame.from_dict(followersdata, orient = "index")
followersDF.to_excel("%s-%s-follower list.xlsx" % (username, datestamp),
index = False, encoding = 'utf-8')