In this example, we'll build a single-file agent that retrieves a tweet from X and replies to it. After fetching the tweet, the agent sends it to the Syntax Agent API to generate a response, which you can then post as a reply to the original tweet. The result is a simple pipeline that combines tweet retrieval with automated response generation.
Set up the environment
Before proceeding, create a .env file with the following content. You can generate SYNTAX_API_KEY on the Creator Dashboard page. To get the TWITTER_COOKIE_CT0 and TWITTER_COOKIE_AUTH values, log in to your X account in a browser and copy the cookie values from the developer tools.
.env
TWITTER_USERNAME=
TWITTER_PASSWORD=
# Log in to X in your browser and copy the `ct0` and `auth_token` cookies.
# (You can find them under Application => Cookies in the dev tools.)
TWITTER_COOKIE_CT0=
TWITTER_COOKIE_AUTH=
SYNTAX_API_KEY=
SYNTAX_API="http://api.spectrallabs.xyz"
Initialize your project in your preferred programming language.
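For example, for the JavaScript agent you could initialize an npm project and install the two packages imported by index.js (setting type=module because the script uses ES module imports and top-level await):
npm init -y
npm pkg set type=module
npm install agent-twitter-client dotenv
For the Python agent, the imports in run.py correspond to the twitter-api-client, python-dotenv, and requests packages (package names assumed from the import paths):
pip install twitter-api-client python-dotenv requests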
Once the project is set up, create a file that acts as your agent. The last line, which posts the reply, is commented out to avoid accidentally replying to the original tweet; feel free to uncomment it if you want.
index.js
import 'dotenv/config';
import { Scraper } from 'agent-twitter-client';
const username = process.env.TWITTER_USERNAME;
const password = process.env.TWITTER_PASSWORD;
const syntaxApi = process.env.SYNTAX_API;
const syntaxApiKey = process.env.SYNTAX_API_KEY;
if (!username || !password || !syntaxApi || !syntaxApiKey) {
  console.error("Invalid env vars. Please check your .env file.");
  process.exit(1);
}
const [targetUser] = process.argv.slice(2);
if (!targetUser) {
  console.error("Please provide a valid twitter username.");
  process.exit(1);
}
const scraper = new Scraper();
await scraper.login(username, password);
// Send the prompt to the Syntax Agent /chat endpoint and return the generated text.
async function chat(content) {
  return await fetch(`${syntaxApi}/chat`, {
    method: 'POST',
    headers: {
      'Authorization': `Bearer ${syntaxApiKey}`,
      'Content-Type': 'application/json',
    },
    body: JSON.stringify({
      content
    }),
  })
    .then((res) => res.json())
    .then((res) => res.data.content)
    .catch((err) => console.error(err.message));
}
console.log('Fetching tweets from:', targetUser);
const tweets = await scraper.getTweets(targetUser, 1);
const [tweet] = await Array.fromAsync(tweets);
console.log('Fetched tweet:', tweet.text);
const content = `
Hey, reply to this tweet from ${tweet.name}?
Tweet: ${tweet.text}
${tweet.isQuoted ? `The tweet quoted this tweet: ${tweet.quotedStatus.text}` : ''}
`;
const result = await chat(content);
console.log('Generated response from the agent:', result);
// Make a reply to the original tweet
// await scraper.sendTweet(result, tweet.id).then(res => res.json());
run.py
from twitter.scraper import Scraper
from twitter.account import Account
from twitter.util import find_key
from dotenv import load_dotenv
import os
import sys
import requests
load_dotenv()
syntax_api = os.getenv('SYNTAX_API')
syntax_api_key = os.getenv('SYNTAX_API_KEY')
twitter_ct0 = os.getenv('TWITTER_COOKIE_CT0')
twitter_auth = os.getenv('TWITTER_COOKIE_AUTH')
# Send the prompt to the Syntax Agent /chat endpoint and return the generated text.
def chat(content):
    headers = {
        'Content-Type': 'application/json',
        'Authorization': 'Bearer ' + syntax_api_key
    }
    data = {
        'content': content
    }
    response = requests.post(syntax_api + '/chat', json=data, headers=headers)
    response.raise_for_status()
    result = response.json().get('data', {}).get('content')
    return result
if __name__ == '__main__':
    if syntax_api is None or syntax_api_key is None or twitter_ct0 is None or twitter_auth is None:
        print('Invalid env vars. Please check your .env file.')
        sys.exit(1)
    if len(sys.argv) < 2:
        print('Please provide target user.')
        sys.exit(1)
    targetUser = sys.argv[1]
    scraper = Scraper(cookies={'ct0': twitter_ct0, 'auth_token': twitter_auth})
    account = Account(cookies={'ct0': twitter_ct0, 'auth_token': twitter_auth})
    users = scraper.users([targetUser])
    user_id = users[0].get('data', {}).get('user', {}).get('result', {}).get('rest_id')
    tweets = scraper.tweets([user_id], limit=1)
    print(f'Fetching tweets from: {targetUser}')
    tweet_legacies = list({
        'user': x.get('result', {}).get('core', {}).get('user_results', {}).get('result', {}).get('legacy'),
        'tweet': x.get('result', {}).get('legacy'),
        'quoted': x.get('result', {}).get('quoted_status_result', {}).get('result', {}).get('legacy')
    } for x in find_key(tweets, 'tweet_results'))
    print(f'Fetched tweet: {tweet_legacies[0]["tweet"]["full_text"]}')
    prompt = f"""
Hey, reply to this tweet from {tweet_legacies[0]['user']['name']}?
Tweet: {tweet_legacies[0]['tweet']['full_text']}
{"" if tweet_legacies[0]['quoted'] is None else f"Quoted Tweet: {tweet_legacies[0]['quoted']['full_text']}"}
"""
    result = chat(prompt)
    print(f'Generated response from the agent: {result}')
    # Make a reply to the original tweet
    # account.reply(result, tweet_id=tweet_legacies[0]['tweet']['id_str'])
You can test and run the agent with the following commands. Don't forget to pass a valid X account name.
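For example, replacing <username> with the target X handle:
node index.js <username>
python run.py <username>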