updated prompt / added TWIM
parent 39a6a5775a
commit 32a6ba2c35
@@ -1,6 +1,6 @@
 # text-gen-bot
 
-Matrix bot that generates messages based off of messages of other users using a neural network. The first Matrix AI?
+Matrix bot that generates messages based off of messages of other users using a neural network. The first Matrix AI? [Featured on TWIM!](https://matrix.org/blog/2022/08/26/this-week-in-matrix-2022-08-26#dept-of-interesting-projects-%EF%B8%8F)
 
 Note: Project is still being developed and some functionality is not fully implemented yet.
 
index.js (12 changed lines)
@@ -2,6 +2,7 @@ import config from './config.json' assert {type: "json"};
 import { MatrixClient, SimpleFsStorageProvider, AutojoinRoomsMixin } from "matrix-bot-sdk";
 import fs from "fs";
 import { PythonShell } from 'python-shell';
+import { type } from 'os';
 
 const storage = new SimpleFsStorageProvider("storage.json");
 const client = new MatrixClient(config.homeserver, config.token, storage);
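For orientation, the imports and client construction above are the standard matrix-bot-sdk bootstrap. A minimal sketch of how such a client is typically finished off and started follows; the autojoin call and the start/log lines are assumptions based on common matrix-bot-sdk usage, not lines taken from this commit.

```javascript
// Minimal sketch, assuming the usual matrix-bot-sdk bootstrap around the
// imports shown in the hunk above (not part of this commit).
import { MatrixClient, SimpleFsStorageProvider, AutojoinRoomsMixin } from "matrix-bot-sdk";
import config from './config.json' assert { type: "json" };

const storage = new SimpleFsStorageProvider("storage.json");
const client = new MatrixClient(config.homeserver, config.token, storage);

// Accept room invites automatically so the bot can be invited to new rooms.
AutojoinRoomsMixin.setupOnClient(client);

// Start syncing; "room.message" handlers fire once the client is running.
client.start().then(() => console.log("Bot started"));
```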
@@ -20,13 +21,13 @@ client.on("room.message", (roomId, event) => {
     messageCounters.set(roomId, (messageCounters.get(roomId) ?? 0) + 1);
     let userMessage = event["content"]["body"].split(" ");
 
-    console.log(`COUNTER:\t${messageCounters.get(roomId)}\t${roomId}\t${userMessage}`);
+    console.log(`COUNTER:\t${messageCounters.get(roomId)}\t${roomId}\t${userMessage.join(" ")}`);
 
 
     if (userMessage[0].startsWith(config.prefix)) {
         userMessage[0] = userMessage[0].replace(config.prefix, '').toLowerCase();
     } else {
-        fs.appendFile(config.file, userMessage.join(' ') + "\n", function (err) {
+        fs.appendFile(config.file, userMessage.join(" ") + "\n", function (err) {
             if (err) throw err;
         });
     };
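As a concrete illustration of the branch above: a command message has its prefixed first token stripped and lowercased, while ordinary chat falls through to the else branch and is appended to the training corpus. In this sketch, the "!" prefix and the "corpus.txt" path are hypothetical stand-ins for config.prefix and config.file.

```javascript
// Illustration of the prefix handling shown in the diff, with hypothetical
// values standing in for config.prefix and config.file.
import fs from "fs";

const prefix = "!";              // stands in for config.prefix
const corpusFile = "corpus.txt"; // stands in for config.file

let userMessage = "!Generate hello there".split(" ");

if (userMessage[0].startsWith(prefix)) {
    // "!Generate" -> "generate"; the remaining words become the command arguments.
    userMessage[0] = userMessage[0].replace(prefix, '').toLowerCase();
    console.log(userMessage); // [ 'generate', 'hello', 'there' ]
} else {
    // Non-command messages are collected as training text, one message per line.
    fs.appendFile(corpusFile, userMessage.join(" ") + "\n", (err) => {
        if (err) throw err;
    });
}
```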
@@ -38,8 +39,11 @@ client.on("room.message", (roomId, event) => {
         console.log("Generating message...");
 
         userMessage.shift()
-        const options = { args: ['generate'] };
-
+        userMessage = userMessage.join(" ")
+        fs.appendFile(config.file, userMessage + "\n", function (err) {
+            if (err) throw err;
+        });
+        const options = { args: ['generate', userMessage] };
         PythonShell.run(pyFile, options, (err, message) => {
             if (err) throw err;
             client.sendText(roomId, message.toString());
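The effect of the new options line is that the user's prompt now travels to the Python process as a command-line argument, and whatever that process writes to stdout comes back to the callback as an array of lines. Below is a rough sketch of that round trip from the JS side, using the callback-style python-shell API the repo already relies on; the promise wrapper, the "generate.py" filename, and the explicit join are illustrative additions, not code from this commit.

```javascript
// Rough sketch of the JS side of the round trip (callback-style python-shell API,
// as used in index.js). "generate.py" stands in for whatever pyFile points at.
import { PythonShell } from 'python-shell';

function generateReply(prompt) {
    return new Promise((resolve, reject) => {
        // In the Python process, sys.argv becomes: [script, 'generate', prompt]
        const options = { args: ['generate', prompt] };
        PythonShell.run("generate.py", options, (err, output) => {
            if (err) return reject(err);
            // `output` is an array of stdout lines; Array.prototype.toString()
            // joins them with commas, which is why the bot's reply can contain
            // commas between lines. Joining explicitly makes that visible.
            resolve((output ?? []).join("\n"));
        });
    });
}

// Usage inside the room.message handler (illustrative):
// const reply = await generateReply(userMessage);
// await client.sendText(roomId, reply);
```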
@@ -13,10 +13,11 @@ file_name = json_object['file']
 # ? generate message using trained model
 
-def generate_message(*prompt):
+def generate_message(prompt=None):
     # ai = aitextgen(prompt=prompt)
     ai = aitextgen(model_folder="trained_model",
                    tokenizer_file="aitextgen.tokenizer.json", prompt=prompt)
-    ai.generate()
+    return ai.generate()
 
 
 # ? train model using text file
@@ -37,9 +38,9 @@ match sys.argv[1]:
         # ? send message to parent JS process
         try:
             prompt = ' '.join(map(str, sys.argv[2:]))
-            print(generate_message(prompt))
+            generate_message(prompt)
         except IndexError:
-            print(generate_message())
+            generate_message()
         finally:
             sys.stdout.flush()
     case "train":
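One consequence of dropping the print() wrappers: the JS side now only receives a reply if the Python process writes the text to stdout itself (as far as I can tell, aitextgen's generate() does print by default when not asked to return a list, which is presumably why this still works). If the Python side ever produces no output, python-shell's result array comes back empty or undefined. A hedged suggestion for guarding against that on the JS side, not something present in this commit:

```javascript
// Hedged suggestion (not in this commit): guard against an empty stdout capture
// so the bot never posts a blank message if the Python side printed nothing.
PythonShell.run(pyFile, options, (err, message) => {
    if (err) throw err;
    const reply = (message ?? []).join("\n").trim();
    if (reply.length > 0) {
        client.sendText(roomId, reply);
    } else {
        console.log("Python process produced no output; nothing sent.");
    }
});
```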