Compare commits

No commits in common. "5b7ff2495514ebcb5f306315457cb3dd0d9277e9" and "9292d4f991f68872470f39421391732168e82bb1" have entirely different histories.

5 changed files with 11 additions and 137 deletions

.gitignore (vendored)

@@ -65,4 +65,3 @@ docker/lnbits/data
 
 # nostr link extract
 scripts/nostr-link-extract.config.json
-scripts/nostr-links.db

api/resolvers/snl.js

@@ -1,5 +1,3 @@
-import { SN_ADMIN_IDS } from '@/lib/constants'
-
 export default {
   Query: {
     snl: async (parent, _, { models }) => {
@@ -9,7 +7,7 @@ export default {
   },
   Mutation: {
     onAirToggle: async (parent, _, { models, me }) => {
-      if (!me || !SN_ADMIN_IDS.includes(me.id)) {
+      if (me.id !== 616) {
         throw new Error('not an admin')
       }
       const { id, live } = await models.snl.findFirst()
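
Reading the hunks above: the base revision imports `SN_ADMIN_IDS` and gates `onAirToggle` on membership in that list, while the head hardcodes user id 616. The list-based check also rejects unauthenticated callers via `!me`, where the hardcoded comparison would throw a TypeError instead of failing cleanly. A minimal sketch of the list-based gate, with placeholder ids (the real values live in `@/lib/constants`):

```js
// Minimal sketch of the base side's admin gate. SN_ADMIN_IDS is assumed to
// be an array of numeric user ids; the values below are placeholders.
const SN_ADMIN_IDS = [616, 1001, 1002]

function assertAdmin (me) {
  // `!me` catches anonymous callers before `.id` is ever dereferenced
  if (!me || !SN_ADMIN_IDS.includes(me.id)) {
    throw new Error('not an admin')
  }
}
```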

awards.csv

@@ -184,8 +184,6 @@ ed-kung,pr,#1926,#1927,easy,,,,100k,simplestacker@getalby.com,???
 ed-kung,issue,#1926,#1927,easy,,,,10k,simplestacker@getalby.com,???
 ed-kung,issue,#1913,#1890,good-first-issue,,,,2k,simplestacker@getalby.com,???
 Scroogey-SN,pr,#1930,#1167,good-first-issue,,,,20k,Scroogey@coinos.io,???
-itsrealfake,issue,#1930,#1167,good-first-issue,,,,2k,smallimagination100035@getalby.com,???
+itsrealfake,issue,#1930,#1167,good-first-issue,,,,2k,???,???
 Scroogey-SN,pr,#1948,#1849,medium,urgent,,,750k,Scroogey@coinos.io,???
 felipebueno,issue,#1947,#1945,good-first-issue,,,,2k,felipebueno@blink.sv,???
-ed-kung,pr,#1952,#1951,easy,,,,100k,simplestacker@getalby.com,???
-ed-kung,issue,#1952,#1951,easy,,,,10k,simplestacker@getalby.com,???


docs/dev/semantic-search.md

@@ -1,4 +1,4 @@
-Getting semantic search setup in OpenSearch is currently a multistep, manual process. To configure semantic search, enter the following commands into OpenSearch's REST API. You can do this in Dev Tools in the OpenSearch Dashboard (after starting your SN dev environment, point your browser to localhost:5601). You can also use CURL to send these commands to localhost:9200.
+Getting semantic search setup in OpenSearch is a multistep process.
 
 ### step 1: configure the ml plugin
 ```json
@@ -67,7 +67,7 @@ PUT /_ingest/pipeline/nlp-ingest-pipeline
     },
     {
       "text_embedding": {
-        "model_id": "<model id>",
+        "model_id": "6whlBY0B2sj1ObjeeD5d",
         "field_map": {
           "text": "text_embedding",
           "title": "title_embedding"
@@ -306,13 +306,3 @@ GET /item-nlp/_search
   }
 }
 ```
-
-### step 12: configure the development environment to use the nlp pipeline
-
-Add the following lines to `.env.local`:
-```
-OPENSEARCH_INDEX=item-nlp
-OPENSEARCH_MODEL_ID=<model id>
-```
-
-Note that you won't have to re-do the above steps each time you restart your dev instance. The OpenSearch configuration is saved to a local volume.
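
The removed intro explained how these commands reach OpenSearch: via Dev Tools in OpenSearch Dashboards (localhost:5601 in the SN dev environment) or directly against the REST endpoint on localhost:9200. As a sketch of the latter route, assuming the dev defaults (no auth, Node 18+ for global `fetch`) and a placeholder model id, the pipeline shown in the second hunk could be applied like this:

```js
// Sketch: PUT the nlp-ingest-pipeline to a local OpenSearch over REST.
// Assumes http://localhost:9200 with security disabled (dev defaults);
// '<model id>' stands for the id returned when the model was deployed.
const pipeline = {
  description: 'embeds item text and title on ingest',
  processors: [{
    text_embedding: {
      model_id: '<model id>',
      field_map: { text: 'text_embedding', title: 'title_embedding' }
    }
  }]
}

async function putPipeline () {
  const res = await fetch('http://localhost:9200/_ingest/pipeline/nlp-ingest-pipeline', {
    method: 'PUT',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(pipeline)
  })
  console.log(await res.json()) // { acknowledged: true } on success
}

putPipeline().catch(console.error)
```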

scripts/nostr-link-extract.js (executable file → normal file)

@@ -1,12 +1,7 @@
-#!/usr/bin/env node
-
-const { execSync } = require('child_process')
-module.paths.push(execSync('npm config get prefix').toString().trim() + '/lib/node_modules')
 const WebSocket = require('ws') // You might need to install this: npm install ws
 const { nip19 } = require('nostr-tools') // Keep this for formatting
 const fs = require('fs')
 const path = require('path')
-const sqlite3 = require('sqlite3').verbose() // Add this at the top with other requires
 
 // ANSI color codes
 const colors = {
@@ -44,91 +39,6 @@ const colors = {
   }
 }
 
-// Add these new database utility functions after the color definitions but before the config
-const db = {
-  connection: null,
-  async init () {
-    return new Promise((resolve, reject) => {
-      const dbPath = path.join(__dirname, 'nostr-links.db')
-      this.connection = new sqlite3.Database(dbPath, (err) => {
-        if (err) {
-          logger.error(`Error opening database: ${err.message}`)
-          reject(err)
-          return
-        }
-        this.connection.run(`
-          CREATE TABLE IF NOT EXISTS notes (
-            id TEXT PRIMARY KEY,
-            pubkey TEXT,
-            content TEXT,
-            created_at INTEGER,
-            metadata TEXT,
-            processed_at INTEGER
-          )
-        `, (err) => {
-          if (err) {
-            logger.error(`Error creating table: ${err.message}`)
-            reject(err)
-            return
-          }
-          resolve()
-        })
-      })
-    })
-  },
-  async getLatestNoteTimestamp () {
-    return new Promise((resolve, reject) => {
-      this.connection.get(
-        'SELECT MAX(created_at) as latest FROM notes',
-        (err, row) => {
-          if (err) {
-            reject(err)
-            return
-          }
-          resolve(row?.latest || 0)
-        }
-      )
-    })
-  },
-  async saveNote (note) {
-    return new Promise((resolve, reject) => {
-      const metadata = note.userMetadata ? JSON.stringify(note.userMetadata) : null
-      this.connection.run(
-        `INSERT OR IGNORE INTO notes (id, pubkey, content, created_at, metadata, processed_at)
-         VALUES (?, ?, ?, ?, ?, ?)`,
-        [note.id, note.pubkey, note.content, note.created_at, metadata, Math.floor(Date.now() / 1000)],
-        (err) => {
-          if (err) {
-            reject(err)
-            return
-          }
-          resolve()
-        }
-      )
-    })
-  },
-  async close () {
-    return new Promise((resolve, reject) => {
-      if (this.connection) {
-        this.connection.close((err) => {
-          if (err) {
-            reject(err)
-            return
-          }
-          resolve()
-        })
-      } else {
-        resolve()
-      }
-    })
-  }
-}
-
 // Default configuration
 let config = {
   userPubkeys: [],
@@ -326,16 +236,9 @@ async function fetchEvents (relayUrls, filter, timeoutMs = 10000) {
  * @returns {Promise<Array>} - Array of note objects containing external links within the time interval
  */
 async function getNotesWithLinks (userPubkeys, timeIntervalHours, relayUrls, ignorePubkeys = []) {
-  // Get the latest stored note timestamp
-  const latestStoredTimestamp = await db.getLatestNoteTimestamp()
-
   // Calculate the cutoff time in seconds (Nostr uses UNIX timestamp)
   const now = Math.floor(Date.now() / 1000)
-
-  // Use the later of: configured time interval or latest stored note
-  const configuredCutoff = now - (timeIntervalHours * 60 * 60)
-  const cutoffTime = Math.max(configuredCutoff, latestStoredTimestamp)
-  logger.debug(`Using cutoff time: ${new Date(cutoffTime * 1000).toISOString()}`)
+  const cutoffTime = now - (timeIntervalHours * 60 * 60)
 
   const allNotesWithLinks = []
   const allFollowedPubkeys = new Set() // To collect all followed pubkeys
@@ -492,11 +395,6 @@ async function getNotesWithLinks (userPubkeys, timeIntervalHours, relayUrls, ign
     logger.progress(`Completed processing all ${totalBatches} batches`)
   }
 
-  // After processing notes and before returning, save them to the database
-  for (const note of allNotesWithLinks) {
-    await db.saveNote(note)
-  }
-
   return allNotesWithLinks
 }
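
The first of these two hunks is the behavioral core of the change: the base resumed from whichever is later, the configured window or the newest note already in sqlite, so repeated runs only fetched new events; the head always uses the fixed window. A sketch of the base's cutoff logic (helper names as in the removed code):

```js
// Sketch of the base side's incremental cutoff. `db.getLatestNoteTimestamp`
// is the removed sqlite helper; it resolves to 0 when the table is empty.
async function computeCutoff (timeIntervalHours, db) {
  const now = Math.floor(Date.now() / 1000) // Nostr timestamps are UNIX seconds
  const configuredCutoff = now - (timeIntervalHours * 60 * 60)
  const latestStoredTimestamp = await db.getLatestNoteTimestamp()
  // never re-fetch older than the newest note we already stored
  return Math.max(configuredCutoff, latestStoredTimestamp)
}
```
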
@@ -507,12 +405,9 @@ async function getNotesWithLinks (userPubkeys, timeIntervalHours, relayUrls, ign
  * @returns {String} - Formatted string with note information
  */
 function formatNoteOutput (notes) {
-  // Sort notes by timestamp (newest first)
-  const sortedNotes = [...notes].sort((a, b) => b.created_at - a.created_at)
-
   const output = []
-  for (const note of sortedNotes) {
+  for (const note of notes) {
     // Get note ID as npub
     const noteId = nip19.noteEncode(note.id)
     const pubkey = nip19.npubEncode(note.pubkey)
@@ -605,15 +500,12 @@ function normalizeToHexPubkey (key) {
  * Main function to execute the script
  */
 async function main () {
-  // Initialize database
-  await db.init()
+  // Load configuration from file
+  const configPath = path.join(__dirname, 'nostr-link-extract.config.json')
+  logger.info(`Loading configuration from ${configPath}`)
+  config = loadConfig(configPath)
 
   try {
-    // Load configuration from file
-    const configPath = path.join(__dirname, 'nostr-link-extract.config.json')
-    logger.info(`Loading configuration from ${configPath}`)
-    config = loadConfig(configPath)
-
     logger.info(`Starting Nostr link extraction (time interval: ${config.timeIntervalHours} hours)`)
 
     // Convert any npub format keys to hex
@@ -644,9 +536,6 @@ async function main () {
     }
   } catch (error) {
     logger.error(`${error}`)
-  } finally {
-    // Close database connection
-    await db.close()
   }
 }
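
Taken together, the script-side removals strip the whole sqlite persistence layer: the base opened `scripts/nostr-links.db` in `main`, resumed from the stored high-water mark, wrote each extracted note with `INSERT OR IGNORE` (so the primary key dedupes across runs), and closed the handle in a `finally` block. Roughly, the removed lifecycle (using the `db` helper deleted above):

```js
// Rough lifecycle of the removed persistence layer, per the base revision.
// `fetchNotes` is a hypothetical stand-in for getNotesWithLinks(...).
async function run (fetchNotes) {
  await db.init() // opens nostr-links.db and creates the notes table if needed
  try {
    const notes = await fetchNotes()
    for (const note of notes) {
      await db.saveNote(note) // INSERT OR IGNORE: duplicate ids are no-ops
    }
  } finally {
    await db.close() // always release the sqlite handle, even on errors
  }
}
```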