added
This commit is contained in:
@@ -64,6 +64,48 @@ func (db *DB) migrate() error {
|
||||
published_at DATETIME,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
)`,
|
||||
`CREATE TABLE IF NOT EXISTS prices (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
ticker TEXT NOT NULL,
|
||||
date DATE NOT NULL,
|
||||
open REAL,
|
||||
high REAL,
|
||||
low REAL,
|
||||
close REAL,
|
||||
volume INTEGER,
|
||||
UNIQUE(ticker, date)
|
||||
)`,
|
||||
`CREATE TABLE IF NOT EXISTS signals (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
ticker TEXT NOT NULL UNIQUE,
|
||||
price REAL,
|
||||
change_pct REAL,
|
||||
rsi14 REAL,
|
||||
macd REAL,
|
||||
macd_signal REAL,
|
||||
macd_hist REAL,
|
||||
sma20 REAL,
|
||||
sma50 REAL,
|
||||
volume INTEGER,
|
||||
avg_volume20 INTEGER,
|
||||
alert TEXT DEFAULT '',
|
||||
computed_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
)`,
|
||||
`CREATE TABLE IF NOT EXISTS insider_trades (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
ticker TEXT NOT NULL,
|
||||
insider_name TEXT,
|
||||
insider_title TEXT,
|
||||
transaction_code TEXT,
|
||||
shares REAL,
|
||||
price REAL,
|
||||
total_value REAL,
|
||||
transaction_date DATE,
|
||||
accession_no TEXT UNIQUE,
|
||||
filing_url TEXT,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
)`,
|
||||
`CREATE INDEX IF NOT EXISTS idx_insider_ticker ON insider_trades(ticker)`,
|
||||
}
|
||||
|
||||
for _, q := range queries {
|
||||
@@ -71,5 +113,27 @@ func (db *DB) migrate() error {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// Migrations additives — on ignore les erreurs si la colonne/index existe déjà
|
||||
additive := []string{
|
||||
`ALTER TABLE news ADD COLUMN finnhub_id INTEGER`,
|
||||
`CREATE UNIQUE INDEX IF NOT EXISTS idx_news_finnhub_id ON news(finnhub_id) WHERE finnhub_id IS NOT NULL`,
|
||||
`ALTER TABLE signals ADD COLUMN market_cap INTEGER DEFAULT 0`,
|
||||
`ALTER TABLE signals ADD COLUMN short_ratio REAL DEFAULT 0`,
|
||||
`ALTER TABLE signals ADD COLUMN score INTEGER DEFAULT 0`,
|
||||
`ALTER TABLE signals ADD COLUMN on_etoro INTEGER DEFAULT 0`,
|
||||
`ALTER TABLE signals ADD COLUMN week52_high REAL DEFAULT 0`,
|
||||
`ALTER TABLE signals ADD COLUMN week52_low REAL DEFAULT 0`,
|
||||
`ALTER TABLE signals ADD COLUMN pct_from_high REAL DEFAULT 0`,
|
||||
`ALTER TABLE signals ADD COLUMN insider_value_30d REAL DEFAULT 0`,
|
||||
`ALTER TABLE signals ADD COLUMN source TEXT DEFAULT 'watchlist'`,
|
||||
`CREATE INDEX IF NOT EXISTS idx_instruments_ticker ON instruments(ticker)`,
|
||||
`CREATE INDEX IF NOT EXISTS idx_signals_score ON signals(score DESC)`,
|
||||
`CREATE INDEX IF NOT EXISTS idx_signals_source ON signals(source)`,
|
||||
}
|
||||
for _, q := range additive {
|
||||
db.Exec(q) // intentionnellement sans vérification d'erreur
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -0,0 +1,300 @@
|
||||
package edgar
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"encoding/xml"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
)
|
||||
|
||||
const (
	// baseURL serves EDGAR's JSON APIs (submissions, company facts).
	baseURL = "https://data.sec.gov"
	// archiveURL serves raw filing documents (Form 4 XML lives here).
	archiveURL = "https://www.sec.gov/Archives/edgar/data"
	// userAgent identifies this client to the SEC; EDGAR rejects requests
	// without a contactable User-Agent.
	userAgent = "StockRadar legreg002@hotmail.com"
)
|
||||
|
||||
// Client talks to SEC EDGAR and lazily caches the ticker→CIK mapping.
type Client struct {
	http    *http.Client
	cikMap  map[string]string // ticker → CIK (zero-padded 10 digits)
	cikOnce sync.Once         // guards the one-time download of cikMap
}

// InsiderTrade represents one parsed Form 4 transaction.
type InsiderTrade struct {
	Ticker          string
	InsiderName     string
	InsiderTitle    string
	TransactionCode string // P=purchase, S=sale, A=award, etc.
	Shares          float64
	PricePerShare   float64
	TotalValue      float64 // Shares * PricePerShare
	TransactionDate string  // as reported in the filing (YYYY-MM-DD)
	AccessionNo     string  // EDGAR accession number, unique per filing
	FilingURL       string  // link to the source XML on sec.gov
}
|
||||
|
||||
// ---- types for JSON/XML parsing ----

// tickerEntry mirrors one record of company_tickers.json.
type tickerEntry struct {
	CIK    int    `json:"cik_str"`
	Ticker string `json:"ticker"`
	Title  string `json:"title"`
}

// submissionsResponse mirrors the parts of /submissions/CIK##########.json
// we use: four parallel arrays describing the most recent filings.
type submissionsResponse struct {
	Filings struct {
		Recent struct {
			Form            []string `json:"form"`
			AccessionNumber []string `json:"accessionNumber"`
			FilingDate      []string `json:"filingDate"`
			PrimaryDocument []string `json:"primaryDocument"`
		} `json:"recent"`
	} `json:"filings"`
}

// form4Doc mirrors the subset of a Form 4 XML document we read.
type form4Doc struct {
	Issuer struct {
		Symbol string `xml:"issuerTradingSymbol"`
	} `xml:"issuer"`
	ReportingOwner struct {
		ID struct {
			Name string `xml:"rptOwnerName"`
		} `xml:"reportingOwnerId"`
		Relationship struct {
			IsDirector int    `xml:"isDirector"`
			IsOfficer  int    `xml:"isOfficer"`
			Title      string `xml:"officerTitle"`
		} `xml:"reportingOwnerRelationship"`
	} `xml:"reportingOwner"`
	NonDerivativeTable struct {
		Transactions []nonDerivativeTx `xml:"nonDerivativeTransaction"`
	} `xml:"nonDerivativeTable"`
}

// nonDerivativeTx is one non-derivative transaction row in a Form 4.
type nonDerivativeTx struct {
	Date struct {
		Value string `xml:"value"`
	} `xml:"transactionDate"`
	Coding struct {
		Code string `xml:"transactionCode"`
	} `xml:"transactionCoding"`
	Amounts struct {
		Shares struct {
			Value float64 `xml:"value"`
		} `xml:"transactionShares"`
		Price struct {
			Value float64 `xml:"value"`
		} `xml:"transactionPricePerShare"`
		AcqDisp struct {
			Value string `xml:"value"`
		} `xml:"transactionAcquiredDisposedCode"`
	} `xml:"transactionAmounts"`
}
|
||||
|
||||
// ---- constructeur ----
|
||||
|
||||
func New() *Client {
|
||||
return &Client{
|
||||
http: &http.Client{Timeout: 15 * time.Second},
|
||||
}
|
||||
}
|
||||
|
||||
// ---- API publique ----
|
||||
|
||||
// RecentInsiderBuys returns insider purchases (transaction code P, and
// priced A awards as kept by parseForm4) for a ticker over the last 30 days.
func (c *Client) RecentInsiderBuys(ticker string) ([]InsiderTrade, error) {
	cik, err := c.lookupCIK(ticker)
	if err != nil {
		return nil, fmt.Errorf("CIK not found for %s: %w", ticker, err)
	}

	accessions, docs, dates, err := c.recentForm4Filings(cik, 30)
	if err != nil {
		return nil, err
	}

	cutoff := time.Now().AddDate(0, 0, -30).Format("2006-01-02")
	var trades []InsiderTrade

	for i, acc := range accessions {
		// Defensive re-check of the filing date; recentForm4Filings already
		// filters by the same cutoff. String comparison works because the
		// dates are ISO YYYY-MM-DD.
		if i >= len(dates) || dates[i] < cutoff {
			continue
		}
		primaryDoc := ""
		if i < len(docs) {
			primaryDoc = docs[i]
		}

		form4Trades, err := c.parseForm4(cik, acc, primaryDoc, ticker)
		if err != nil {
			continue // skip individual parse failures
		}
		trades = append(trades, form4Trades...)
		time.Sleep(120 * time.Millisecond) // EDGAR rate limit
	}

	return trades, nil
}
|
||||
|
||||
// ---- méthodes internes ----
|
||||
|
||||
func (c *Client) lookupCIK(ticker string) (string, error) {
|
||||
if err := c.loadCIKMap(); err != nil {
|
||||
return "", err
|
||||
}
|
||||
cik, ok := c.cikMap[strings.ToUpper(ticker)]
|
||||
if !ok {
|
||||
return "", fmt.Errorf("ticker %s not found", ticker)
|
||||
}
|
||||
return cik, nil
|
||||
}
|
||||
|
||||
func (c *Client) loadCIKMap() error {
|
||||
var loadErr error
|
||||
c.cikOnce.Do(func() {
|
||||
resp, err := c.get("https://www.sec.gov/files/company_tickers.json")
|
||||
if err != nil {
|
||||
loadErr = err
|
||||
return
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
var raw map[string]tickerEntry
|
||||
if err := json.NewDecoder(resp.Body).Decode(&raw); err != nil {
|
||||
loadErr = err
|
||||
return
|
||||
}
|
||||
|
||||
c.cikMap = make(map[string]string, len(raw))
|
||||
for _, entry := range raw {
|
||||
padded := fmt.Sprintf("%010d", entry.CIK)
|
||||
c.cikMap[strings.ToUpper(entry.Ticker)] = padded
|
||||
}
|
||||
})
|
||||
return loadErr
|
||||
}
|
||||
|
||||
func (c *Client) recentForm4Filings(cik string, maxDays int) (accessions, docs, dates []string, err error) {
|
||||
url := fmt.Sprintf("%s/submissions/CIK%s.json", baseURL, cik)
|
||||
resp, err := c.get(url)
|
||||
if err != nil {
|
||||
return nil, nil, nil, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
var sub submissionsResponse
|
||||
if err := json.NewDecoder(resp.Body).Decode(&sub); err != nil {
|
||||
return nil, nil, nil, err
|
||||
}
|
||||
|
||||
cutoff := time.Now().AddDate(0, 0, -maxDays).Format("2006-01-02")
|
||||
forms := sub.Filings.Recent.Form
|
||||
accs := sub.Filings.Recent.AccessionNumber
|
||||
pdocs := sub.Filings.Recent.PrimaryDocument
|
||||
fdates := sub.Filings.Recent.FilingDate
|
||||
|
||||
for i, form := range forms {
|
||||
if form != "4" {
|
||||
continue
|
||||
}
|
||||
if i < len(fdates) && fdates[i] < cutoff {
|
||||
break // filings are sorted newest first, stop when too old
|
||||
}
|
||||
if i < len(accs) {
|
||||
accessions = append(accessions, accs[i])
|
||||
}
|
||||
if i < len(pdocs) {
|
||||
docs = append(docs, pdocs[i])
|
||||
}
|
||||
if i < len(fdates) {
|
||||
dates = append(dates, fdates[i])
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// parseForm4 downloads one Form 4 XML filing and extracts the insider
// purchases from it. Returns an empty slice when the filing contains no
// qualifying transactions.
func (c *Client) parseForm4(cik, accessionNo, primaryDoc, ticker string) ([]InsiderTrade, error) {
	// Build the XML document URL: accession numbers appear without dashes
	// in archive paths.
	accNoDashes := strings.ReplaceAll(accessionNo, "-", "")

	xmlFile := primaryDoc
	if xmlFile == "" || !strings.HasSuffix(xmlFile, ".xml") {
		// Fallback: try the conventional file name.
		// NOTE(review): assumes "<accession-with-dashes>.xml" exists in the
		// archive folder — confirm against real filings.
		xmlFile = accessionNo + ".xml"
	}

	url := fmt.Sprintf("%s/%s/%s/%s", archiveURL, cik, accNoDashes, xmlFile)
	filingURL := fmt.Sprintf("https://www.sec.gov/Archives/edgar/data/%s/%s/%s", cik, accNoDashes, xmlFile)

	resp, err := c.get(url)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	var doc form4Doc
	if err := xml.NewDecoder(resp.Body).Decode(&doc); err != nil {
		return nil, err
	}

	insiderName := doc.ReportingOwner.ID.Name
	insiderTitle := doc.ReportingOwner.Relationship.Title
	// Derive a generic title from the relationship flags when the filing
	// leaves officerTitle empty.
	if insiderTitle == "" {
		if doc.ReportingOwner.Relationship.IsDirector == 1 {
			insiderTitle = "Director"
		} else if doc.ReportingOwner.Relationship.IsOfficer == 1 {
			insiderTitle = "Officer"
		}
	}

	var trades []InsiderTrade
	for _, tx := range doc.NonDerivativeTable.Transactions {
		code := tx.Coding.Code
		// Keep purchases (P) and significant awards (A with price > 0).
		if code != "P" && !(code == "A" && tx.Amounts.Price.Value > 0) {
			continue
		}
		shares := tx.Amounts.Shares.Value
		price := tx.Amounts.Price.Value
		if shares <= 0 {
			continue
		}

		trades = append(trades, InsiderTrade{
			Ticker:          ticker,
			InsiderName:     insiderName,
			InsiderTitle:    insiderTitle,
			TransactionCode: code,
			Shares:          shares,
			PricePerShare:   price,
			TotalValue:      shares * price,
			TransactionDate: tx.Date.Value,
			AccessionNo:     accessionNo,
			FilingURL:       filingURL,
		})
	}
	return trades, nil
}
|
||||
|
||||
func (c *Client) get(url string) (*http.Response, error) {
|
||||
req, err := http.NewRequest("GET", url, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
req.Header.Set("User-Agent", userAgent)
|
||||
req.Header.Set("Accept", "application/json, application/xml, text/xml")
|
||||
|
||||
resp, err := c.http.Do(req)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if resp.StatusCode != 200 {
|
||||
resp.Body.Close()
|
||||
return nil, fmt.Errorf("EDGAR HTTP %d: %s", resp.StatusCode, url)
|
||||
}
|
||||
return resp, nil
|
||||
}
|
||||
@@ -0,0 +1,117 @@
|
||||
package edgar
|
||||
|
||||
import (
|
||||
"log"
|
||||
"time"
|
||||
|
||||
"git.rouggy.com/rouggy/stockradar/internal/db"
|
||||
)
|
||||
|
||||
// Poller periodically scans the watchlist for new insider trades on EDGAR
// and stores them in the database.
type Poller struct {
	db      *db.DB
	client  *Client
	ticker  *time.Ticker  // drives the periodic sync; created in Start
	done    chan struct{} // closed by Stop to end the loop
	lastRun time.Time     // written by Sync; not read elsewhere in this file
}
|
||||
|
||||
func NewPoller(database *db.DB) *Poller {
|
||||
return &Poller{
|
||||
db: database,
|
||||
client: New(),
|
||||
done: make(chan struct{}),
|
||||
}
|
||||
}
|
||||
|
||||
func (p *Poller) Start() {
|
||||
p.ticker = time.NewTicker(6 * time.Hour)
|
||||
go func() {
|
||||
if err := p.Sync(); err != nil {
|
||||
log.Printf("edgar poller: initial sync: %v", err)
|
||||
}
|
||||
for {
|
||||
select {
|
||||
case <-p.ticker.C:
|
||||
if err := p.Sync(); err != nil {
|
||||
log.Printf("edgar poller: sync: %v", err)
|
||||
}
|
||||
case <-p.done:
|
||||
return
|
||||
}
|
||||
}
|
||||
}()
|
||||
}
|
||||
|
||||
// Stop halts the ticker and signals the polling goroutine to exit.
// NOTE(review): not idempotent — a second call panics on close of the
// already-closed done channel; callers must call Stop at most once.
func (p *Poller) Stop() {
	if p.ticker != nil {
		p.ticker.Stop()
	}
	close(p.done)
}
|
||||
|
||||
// Sync fetches recent insider buys for every active watchlist ticker and
// inserts the new ones. Per-ticker errors are logged and skipped so a
// single bad symbol doesn't abort the whole pass.
func (p *Poller) Sync() error {
	tickers, err := p.watchlistTickers()
	if err != nil {
		return err
	}
	if len(tickers) == 0 {
		return nil
	}

	log.Printf("edgar: scanning %d tickers for insider trades…", len(tickers))
	total := 0

	for _, sym := range tickers {
		trades, err := p.client.RecentInsiderBuys(sym)
		if err != nil {
			log.Printf("edgar: %s: %v", sym, err)
			continue
		}
		for _, t := range trades {
			// insertTrade reports true only for rows actually inserted
			// (duplicates are ignored via the accession_no constraint).
			if p.insertTrade(t) {
				total++
			}
		}
		time.Sleep(500 * time.Millisecond) // respect the EDGAR rate limit
	}

	p.lastRun = time.Now()
	if total > 0 {
		log.Printf("edgar: sync done — %d nouveaux insider trades", total)
	}
	return nil
}
|
||||
|
||||
func (p *Poller) watchlistTickers() ([]string, error) {
|
||||
rows, err := p.db.Query(`SELECT ticker FROM watchlist WHERE active=1`)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
var tickers []string
|
||||
for rows.Next() {
|
||||
var t string
|
||||
if err := rows.Scan(&t); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
tickers = append(tickers, t)
|
||||
}
|
||||
return tickers, nil
|
||||
}
|
||||
|
||||
// insertTrade stores one trade, relying on INSERT OR IGNORE plus the UNIQUE
// accession_no column to deduplicate. Returns true only when a row was
// actually inserted. Exec errors are deliberately reported as false (best
// effort), not propagated.
func (p *Poller) insertTrade(t InsiderTrade) bool {
	res, err := p.db.Exec(`
		INSERT OR IGNORE INTO insider_trades
		(ticker, insider_name, insider_title, transaction_code,
		 shares, price, total_value, transaction_date, accession_no, filing_url)
		VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
	`, t.Ticker, t.InsiderName, t.InsiderTitle, t.TransactionCode,
		t.Shares, t.PricePerShare, t.TotalValue, t.TransactionDate,
		t.AccessionNo, t.FilingURL)
	if err != nil {
		return false
	}
	n, _ := res.RowsAffected()
	return n > 0
}
|
||||
@@ -0,0 +1,95 @@
|
||||
package etoro
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"time"
|
||||
)
|
||||
|
||||
// instrumentsURL is eToro's public instrument metadata endpoint.
const instrumentsURL = "https://api.etoro.com/metadata/instruments"

// Known InstrumentTypeID values on eToro.
const (
	TypeStock  = 5
	TypeETF    = 10
	TypeCrypto = 12
	TypeIndex  = 21
	TypeCFD    = 6
)

// Client fetches instrument metadata from eToro.
type Client struct {
	http *http.Client
}

// Instrument mirrors one entry of the eToro instruments payload.
type Instrument struct {
	InstrumentID          int    `json:"InstrumentID"`
	InstrumentDisplayName string `json:"InstrumentDisplayName"`
	SymbolFull            string `json:"SymbolFull"`
	InstrumentTypeID      int    `json:"InstrumentTypeID"`
	IsActive              bool   `json:"IsActive"`
	StockIndustryID       int    `json:"StockIndustryID"`
	StockExchangeID       int    `json:"StockExchangeID"`
}
|
||||
|
||||
func New() *Client {
|
||||
return &Client{
|
||||
http: &http.Client{Timeout: 30 * time.Second},
|
||||
}
|
||||
}
|
||||
|
||||
// FetchStocks retourne tous les instruments de type Stock actifs sur eToro.
|
||||
func (c *Client) FetchStocks() ([]Instrument, error) {
|
||||
all, err := c.fetchAll()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var stocks []Instrument
|
||||
for _, inst := range all {
|
||||
if inst.IsActive && inst.InstrumentTypeID == TypeStock {
|
||||
stocks = append(stocks, inst)
|
||||
}
|
||||
}
|
||||
return stocks, nil
|
||||
}
|
||||
|
||||
// FetchAll returns every instrument (stocks + ETFs + crypto + indices).
func (c *Client) FetchAll() ([]Instrument, error) {
	return c.fetchAll()
}
|
||||
|
||||
// fetchAll downloads the full instrument list from eToro's metadata API.
// An empty list is treated as an error so a silently changed API surfaces
// loudly instead of wiping downstream data.
func (c *Client) fetchAll() ([]Instrument, error) {
	req, err := http.NewRequest("GET", instrumentsURL, nil)
	if err != nil {
		return nil, err
	}

	// Headers mimicking the eToro web client; the API rejects anonymous
	// requests without them.
	req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36")
	req.Header.Set("accounttype", "Demo")
	req.Header.Set("ApplicationIdentifier", "ReToro")
	req.Header.Set("Version", "1.211.0")
	req.Header.Set("Accept", "application/json")

	resp, err := c.http.Do(req)
	if err != nil {
		return nil, fmt.Errorf("etoro: %w", err)
	}
	defer resp.Body.Close()

	if resp.StatusCode != 200 {
		return nil, fmt.Errorf("etoro: HTTP %d", resp.StatusCode)
	}

	var instruments []Instrument
	if err := json.NewDecoder(resp.Body).Decode(&instruments); err != nil {
		return nil, fmt.Errorf("etoro: parse error: %w", err)
	}

	if len(instruments) == 0 {
		return nil, fmt.Errorf("etoro: empty response — l'API a peut-être changé")
	}

	return instruments, nil
}
|
||||
@@ -0,0 +1,155 @@
|
||||
package etoro
|
||||
|
||||
import (
|
||||
"log"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"git.rouggy.com/rouggy/stockradar/internal/db"
|
||||
)
|
||||
|
||||
// SyncStatus is the JSON snapshot of an instrument sync, as served to the UI.
type SyncStatus struct {
	Syncing   bool      `json:"syncing"`
	Progress  int       `json:"progress"` // instruments processed so far
	Total     int       `json:"total"`    // instruments to process this run
	Count     int       `json:"count"`    // rows currently in the instruments table
	LastSync  time.Time `json:"last_sync"`
	LastError string    `json:"last_error,omitempty"`
}

// Poller keeps the local instruments table in sync with eToro's universe.
type Poller struct {
	db     *db.DB
	client *Client
	ticker *time.Ticker  // drives the periodic sync; created in Start
	done   chan struct{} // closed by Stop to end the loop

	// mu guards all fields below; they are written by Sync and read by Status.
	mu        sync.Mutex
	syncing   bool
	progress  int
	total     int
	lastSync  time.Time
	lastError string
}
|
||||
|
||||
func NewPoller(database *db.DB) *Poller {
|
||||
return &Poller{
|
||||
db: database,
|
||||
client: New(),
|
||||
done: make(chan struct{}),
|
||||
}
|
||||
}
|
||||
|
||||
func (p *Poller) Start() {
|
||||
p.ticker = time.NewTicker(24 * time.Hour)
|
||||
go func() {
|
||||
if err := p.Sync(); err != nil {
|
||||
log.Printf("etoro poller: initial sync: %v", err)
|
||||
}
|
||||
for {
|
||||
select {
|
||||
case <-p.ticker.C:
|
||||
if err := p.Sync(); err != nil {
|
||||
log.Printf("etoro poller: sync: %v", err)
|
||||
}
|
||||
case <-p.done:
|
||||
return
|
||||
}
|
||||
}
|
||||
}()
|
||||
}
|
||||
|
||||
// Stop halts the ticker and signals the polling goroutine to exit.
// NOTE(review): not idempotent — a second call panics on close of the
// already-closed done channel; callers must call Stop at most once.
func (p *Poller) Stop() {
	if p.ticker != nil {
		p.ticker.Stop()
	}
	close(p.done)
}
|
||||
|
||||
func (p *Poller) Status() SyncStatus {
|
||||
p.mu.Lock()
|
||||
defer p.mu.Unlock()
|
||||
return SyncStatus{
|
||||
Syncing: p.syncing,
|
||||
Progress: p.progress,
|
||||
Total: p.total,
|
||||
Count: p.dbCount(),
|
||||
LastSync: p.lastSync,
|
||||
LastError: p.lastError,
|
||||
}
|
||||
}
|
||||
|
||||
// Sync downloads the eToro stock universe and upserts it into the
// instruments table, updating progress counters as it goes. Concurrent
// calls are coalesced: if a sync is already running, Sync returns nil
// immediately.
func (p *Poller) Sync() error {
	p.mu.Lock()
	if p.syncing {
		p.mu.Unlock()
		return nil // already in progress
	}
	p.syncing = true
	p.progress = 0
	p.total = 0
	p.lastError = ""
	p.mu.Unlock()

	defer func() {
		p.mu.Lock()
		p.syncing = false
		p.lastSync = time.Now()
		p.mu.Unlock()
	}()

	log.Println("etoro: fetching instruments…")
	stocks, err := p.client.FetchStocks()
	if err != nil {
		p.mu.Lock()
		p.lastError = err.Error()
		p.mu.Unlock()
		log.Printf("etoro: fetch error: %v", err)
		return err
	}

	p.mu.Lock()
	p.total = len(stocks)
	p.mu.Unlock()

	log.Printf("etoro: %d stocks à synchroniser", len(stocks))

	inserted := 0
	for i, s := range stocks {
		// Upsert keyed on instrument_id. NOTE(review): asset_class_id is set
		// on insert but not refreshed on conflict — confirm that is intended.
		// Individual Exec errors are silently dropped (only counted via
		// `inserted`).
		_, err := p.db.Exec(`
			INSERT INTO instruments (instrument_id, ticker, name, exchange_id, asset_class_id, synced_at)
			VALUES (?, ?, ?, ?, ?, CURRENT_TIMESTAMP)
			ON CONFLICT(instrument_id) DO UPDATE SET
				ticker = excluded.ticker,
				name = excluded.name,
				exchange_id = excluded.exchange_id,
				synced_at = CURRENT_TIMESTAMP
		`, s.InstrumentID, s.SymbolFull, s.InstrumentDisplayName,
			s.StockExchangeID, s.InstrumentTypeID)
		if err == nil {
			inserted++
		}

		// Publish progress every 100 rows and at the end.
		if (i+1)%100 == 0 || i+1 == len(stocks) {
			p.mu.Lock()
			p.progress = i + 1
			p.mu.Unlock()
			log.Printf("etoro: %d/%d instruments traités", i+1, len(stocks))
		}
	}

	log.Printf("etoro: sync terminée — %d/%d instruments en DB", inserted, len(stocks))
	return nil
}
|
||||
|
||||
func (p *Poller) dbCount() int {
|
||||
var n int
|
||||
p.db.QueryRow(`SELECT COUNT(*) FROM instruments`).Scan(&n)
|
||||
return n
|
||||
}
|
||||
|
||||
// IsEtoro vérifie si un ticker est dans l'univers eToro.
|
||||
func IsEtoro(database *db.DB, ticker string) bool {
|
||||
var count int
|
||||
database.QueryRow(`SELECT COUNT(*) FROM instruments WHERE ticker = ?`, ticker).Scan(&count)
|
||||
return count > 0
|
||||
}
|
||||
@@ -0,0 +1,75 @@
|
||||
package finnhub
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"time"
|
||||
)
|
||||
|
||||
// baseURL is the root of Finnhub's REST API.
const baseURL = "https://finnhub.io/api/v1"

// Client calls the Finnhub news endpoints with a fixed API key.
type Client struct {
	apiKey string
	http   *http.Client
}

// NewsItem mirrors one article in a Finnhub news response.
type NewsItem struct {
	ID       int    `json:"id"`
	Category string `json:"category"`
	Datetime int64  `json:"datetime"` // Unix seconds
	Headline string `json:"headline"`
	Related  string `json:"related"`
	Source   string `json:"source"`
	URL      string `json:"url"`
	Summary  string `json:"summary"`
}
|
||||
|
||||
func New(apiKey string) *Client {
|
||||
return &Client{
|
||||
apiKey: apiKey,
|
||||
http: &http.Client{Timeout: 10 * time.Second},
|
||||
}
|
||||
}
|
||||
|
||||
func (c *Client) CompanyNews(symbol, from, to string) ([]NewsItem, error) {
|
||||
url := fmt.Sprintf("%s/company-news?symbol=%s&from=%s&to=%s&token=%s",
|
||||
baseURL, symbol, from, to, c.apiKey)
|
||||
return c.fetchNews(url)
|
||||
}
|
||||
|
||||
func (c *Client) MarketNews() ([]NewsItem, error) {
|
||||
url := fmt.Sprintf("%s/news?category=general&token=%s", baseURL, c.apiKey)
|
||||
return c.fetchNews(url)
|
||||
}
|
||||
|
||||
// Ping validates the API key with a minimal request (minId filters the
// response down to nothing). Only 401/403 are treated as an invalid key;
// other failures (e.g. 429 rate limiting, 5xx) report success.
func (c *Client) Ping() error {
	url := fmt.Sprintf("%s/news?category=general&minId=999999999&token=%s", baseURL, c.apiKey)
	resp, err := c.http.Get(url)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode == 401 || resp.StatusCode == 403 {
		return fmt.Errorf("invalid API key (HTTP %d)", resp.StatusCode)
	}
	return nil
}
|
||||
|
||||
func (c *Client) fetchNews(url string) ([]NewsItem, error) {
|
||||
resp, err := c.http.Get(url)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != 200 {
|
||||
return nil, fmt.Errorf("finnhub: HTTP %d", resp.StatusCode)
|
||||
}
|
||||
|
||||
var items []NewsItem
|
||||
if err := json.NewDecoder(resp.Body).Decode(&items); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return items, nil
|
||||
}
|
||||
@@ -0,0 +1,131 @@
|
||||
package finnhub
|
||||
|
||||
import (
|
||||
"log"
|
||||
"time"
|
||||
|
||||
"git.rouggy.com/rouggy/stockradar/internal/db"
|
||||
)
|
||||
|
||||
// Poller periodically pulls company and market news from Finnhub into the
// news table.
type Poller struct {
	db      *db.DB
	getKey  func() (string, error) // supplies the API key at sync time
	ticker  *time.Ticker           // drives the periodic sync; created in Start
	done    chan struct{}          // closed by Stop to end the loop
	lastRun time.Time              // written by Sync, read by LastRun
}
|
||||
|
||||
func NewPoller(database *db.DB, getKey func() (string, error)) *Poller {
|
||||
return &Poller{
|
||||
db: database,
|
||||
getKey: getKey,
|
||||
done: make(chan struct{}),
|
||||
}
|
||||
}
|
||||
|
||||
func (p *Poller) Start() {
|
||||
p.ticker = time.NewTicker(15 * time.Minute)
|
||||
go func() {
|
||||
// Run immediately on start
|
||||
if err := p.Sync(); err != nil {
|
||||
log.Printf("finnhub poller: initial sync: %v", err)
|
||||
}
|
||||
for {
|
||||
select {
|
||||
case <-p.ticker.C:
|
||||
if err := p.Sync(); err != nil {
|
||||
log.Printf("finnhub poller: sync: %v", err)
|
||||
}
|
||||
case <-p.done:
|
||||
return
|
||||
}
|
||||
}
|
||||
}()
|
||||
}
|
||||
|
||||
// Stop halts the ticker and signals the polling goroutine to exit.
// NOTE(review): not idempotent — a second call panics on close of the
// already-closed done channel; callers must call Stop at most once.
func (p *Poller) Stop() {
	if p.ticker != nil {
		p.ticker.Stop()
	}
	close(p.done)
}
|
||||
|
||||
// Sync pulls the last 7 days of company news for every watchlist ticker,
// plus the general market feed, deduplicating via insertNews. Missing or
// unreadable API keys make Sync a silent no-op by design.
func (p *Poller) Sync() error {
	apiKey, err := p.getKey()
	if err != nil || apiKey == "" {
		return nil // no key configured: skip silently
	}

	client := New(apiKey)

	tickers, err := p.watchlistTickers()
	if err != nil {
		return err
	}

	now := time.Now()
	from := now.AddDate(0, 0, -7).Format("2006-01-02")
	to := now.Format("2006-01-02")

	total := 0
	for _, sym := range tickers {
		items, err := client.CompanyNews(sym, from, to)
		if err != nil {
			log.Printf("finnhub: news %s: %v", sym, err)
			continue
		}
		for _, item := range items {
			if p.insertNews(sym, item) {
				total++
			}
		}
		time.Sleep(250 * time.Millisecond) // Finnhub free tier: 60 req/min
	}

	// General market news (no specific ticker). Errors and insert counts are
	// intentionally ignored here; `total` only tracks company news.
	market, err := client.MarketNews()
	if err == nil {
		for _, item := range market {
			p.insertNews("", item)
		}
	}

	p.lastRun = now
	if total > 0 {
		log.Printf("finnhub: sync done — %d nouvelles news", total)
	}
	return nil
}
|
||||
|
||||
// LastRun reports when the last sync finished (zero value before the first).
// NOTE(review): read without synchronization while Sync writes lastRun from
// the polling goroutine — racy if called concurrently; confirm callers.
func (p *Poller) LastRun() time.Time { return p.lastRun }
|
||||
|
||||
func (p *Poller) watchlistTickers() ([]string, error) {
|
||||
rows, err := p.db.Query(`SELECT ticker FROM watchlist WHERE active=1`)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
var tickers []string
|
||||
for rows.Next() {
|
||||
var t string
|
||||
if err := rows.Scan(&t); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
tickers = append(tickers, t)
|
||||
}
|
||||
return tickers, nil
|
||||
}
|
||||
|
||||
// insertNews stores one article, relying on INSERT OR IGNORE plus the unique
// finnhub_id index to deduplicate. Returns true only when a row was actually
// inserted. Exec errors are deliberately reported as false (best effort).
func (p *Poller) insertNews(ticker string, item NewsItem) bool {
	// Finnhub timestamps are Unix seconds; normalize to RFC 3339 UTC.
	published := time.Unix(item.Datetime, 0).UTC().Format(time.RFC3339)
	res, err := p.db.Exec(`
		INSERT OR IGNORE INTO news (finnhub_id, ticker, headline, source, url, published_at)
		VALUES (?, ?, ?, ?, ?, ?)
	`, item.ID, ticker, item.Headline, item.Source, item.URL, published)
	if err != nil {
		return false
	}
	n, _ := res.RowsAffected()
	return n > 0
}
|
||||
@@ -0,0 +1,93 @@
|
||||
package indicators
|
||||
|
||||
// MACDResult holds the MACD line, the signal line, and the histogram
// (MACD - Signal), each for the most recent bar.
type MACDResult struct {
	MACD      float64
	Signal    float64
	Histogram float64
}
|
||||
|
||||
// MACD computes the Moving Average Convergence Divergence with the standard
// 12/26/9 periods. Returns the zero value when there is not enough data.
func MACD(closes []float64) MACDResult {
	return MACDCustom(closes, 12, 26, 9)
}
|
||||
|
||||
func MACDCustom(closes []float64, fast, slow, signal int) MACDResult {
|
||||
if len(closes) < slow+signal {
|
||||
return MACDResult{}
|
||||
}
|
||||
|
||||
emaFast := emaSlice(closes, fast)
|
||||
emaSlow := emaSlice(closes, slow)
|
||||
|
||||
// Aligner les deux séries (emaSlow est plus courte)
|
||||
offset := len(emaFast) - len(emaSlow)
|
||||
macdLine := make([]float64, len(emaSlow))
|
||||
for i := range emaSlow {
|
||||
macdLine[i] = emaFast[offset+i] - emaSlow[i]
|
||||
}
|
||||
|
||||
if len(macdLine) < signal {
|
||||
return MACDResult{}
|
||||
}
|
||||
|
||||
signalLine := emaSlice(macdLine, signal)
|
||||
last := macdLine[len(macdLine)-1]
|
||||
sig := signalLine[len(signalLine)-1]
|
||||
|
||||
return MACDResult{
|
||||
MACD: last,
|
||||
Signal: sig,
|
||||
Histogram: last - sig,
|
||||
}
|
||||
}
|
||||
|
||||
// SMA returns the simple moving average of the last `period` values, or 0
// when there are fewer than `period` values.
func SMA(closes []float64, period int) float64 {
	if len(closes) < period {
		return 0
	}
	var total float64
	for _, v := range closes[len(closes)-period:] {
		total += v
	}
	return total / float64(period)
}
|
||||
|
||||
// AvgVolume returns the integer average volume over the last `period` bars,
// clamping the period to the available data; returns 0 when there is none.
func AvgVolume(volumes []int64, period int) int64 {
	if period > len(volumes) {
		period = len(volumes)
	}
	if period == 0 {
		return 0
	}
	var total int64
	for _, v := range volumes[len(volumes)-period:] {
		total += v
	}
	return total / int64(period)
}
|
||||
|
||||
// emaSlice returns the exponential moving average series of data, seeded
// with the SMA of the first `period` values. The result has
// len(data)-period+1 entries; nil when data is too short.
func emaSlice(data []float64, period int) []float64 {
	if len(data) < period {
		return nil
	}
	weight := 2.0 / float64(period+1)

	// Seed: SMA of the first `period` values.
	var seed float64
	for _, v := range data[:period] {
		seed += v
	}

	out := make([]float64, 0, len(data)-period+1)
	out = append(out, seed/float64(period))
	for _, v := range data[period:] {
		prev := out[len(out)-1]
		out = append(out, v*weight+prev*(1-weight))
	}
	return out
}
|
||||
@@ -0,0 +1,43 @@
|
||||
package indicators
|
||||
|
||||
// RSI computes the Relative Strength Index using Wilder's smoothing.
// A period <= 0 defaults to the standard 14. Returns -1 (NOT NaN, despite
// what the original comment claimed) when fewer than period+1 closes are
// available — callers must treat -1 as "insufficient data" — and 100 when
// the series contains no down moves.
func RSI(closes []float64, period int) float64 {
	if period <= 0 {
		period = 14
	}
	if len(closes) < period+1 {
		return -1
	}

	// Seed averages over the first `period` deltas.
	var gains, losses float64
	for i := 1; i <= period; i++ {
		delta := closes[i] - closes[i-1]
		if delta > 0 {
			gains += delta
		} else {
			losses -= delta
		}
	}

	avgGain := gains / float64(period)
	avgLoss := losses / float64(period)

	// Wilder's smoothing over the remaining closes.
	for i := period + 1; i < len(closes); i++ {
		delta := closes[i] - closes[i-1]
		if delta > 0 {
			avgGain = (avgGain*float64(period-1) + delta) / float64(period)
			avgLoss = (avgLoss * float64(period-1)) / float64(period)
		} else {
			avgGain = (avgGain * float64(period-1)) / float64(period)
			avgLoss = (avgLoss*float64(period-1) - delta) / float64(period)
		}
	}

	if avgLoss == 0 {
		return 100 // no losses in the window: RSI saturates
	}
	rs := avgGain / avgLoss
	return 100 - (100 / (1 + rs))
}
|
||||
@@ -0,0 +1,239 @@
|
||||
package scanner
|
||||
|
||||
import (
|
||||
"log"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"git.rouggy.com/rouggy/stockradar/internal/db"
|
||||
"git.rouggy.com/rouggy/stockradar/internal/indicators"
|
||||
"git.rouggy.com/rouggy/stockradar/internal/yahoo"
|
||||
)
|
||||
|
||||
// DiscoveryStatus exposes the progress of the current discovery scan.
type DiscoveryStatus struct {
	Running   bool      `json:"running"`
	Progress  int       `json:"progress"`
	Total     int       `json:"total"`
	Found     int       `json:"found"` // tickers with score > 0
	LastRun   time.Time `json:"last_run"`
	LastError string    `json:"last_error,omitempty"`
}
|
||||
|
||||
// DiscoveryScanner walks the whole eToro instrument universe looking for
// opportunities, scoring and persisting each ticker as it goes.
type DiscoveryScanner struct {
	db    *db.DB
	yahoo *yahoo.Client

	// mu guards every field below; they are read by Status() from HTTP
	// handlers while scan() mutates them from its own goroutine.
	mu        sync.Mutex
	running   bool
	progress  int
	total     int
	found     int
	lastRun   time.Time
	lastError string
}
|
||||
|
||||
func NewDiscovery(database *db.DB) *DiscoveryScanner {
|
||||
return &DiscoveryScanner{
|
||||
db: database,
|
||||
yahoo: yahoo.New(),
|
||||
}
|
||||
}
|
||||
|
||||
func (d *DiscoveryScanner) Status() DiscoveryStatus {
|
||||
d.mu.Lock()
|
||||
defer d.mu.Unlock()
|
||||
return DiscoveryStatus{
|
||||
Running: d.running,
|
||||
Progress: d.progress,
|
||||
Total: d.total,
|
||||
Found: d.found,
|
||||
LastRun: d.lastRun,
|
||||
LastError: d.lastError,
|
||||
}
|
||||
}
|
||||
|
||||
// Run launches the discovery scan in a background goroutine.
// It returns false (and does nothing) if a scan is already in progress.
func (d *DiscoveryScanner) Run() bool {
	d.mu.Lock()
	if d.running {
		d.mu.Unlock()
		return false
	}
	// Reset per-run counters while still holding the lock so Status() never
	// reports a mix of two different runs.
	d.running = true
	d.progress = 0
	d.found = 0
	d.lastError = ""
	d.mu.Unlock()

	go d.scan()
	return true
}
|
||||
|
||||
// scan iterates over every eToro ticker, scores each one via scanTicker,
// and publishes progress under the mutex every 50 tickers. The deferred
// cleanup always clears `running` and stamps lastRun, even on early return.
func (d *DiscoveryScanner) scan() {
	defer func() {
		d.mu.Lock()
		d.running = false
		d.lastRun = time.Now()
		d.mu.Unlock()
	}()

	tickers, err := d.etoroTickers()
	if err != nil {
		d.mu.Lock()
		d.lastError = err.Error()
		d.mu.Unlock()
		return
	}

	d.mu.Lock()
	d.total = len(tickers)
	d.mu.Unlock()

	log.Printf("discovery: démarrage scan %d tickers eToro…", len(tickers))

	found := 0
	for i, sym := range tickers {
		score, alert, err := d.scanTicker(sym)
		// Per-ticker errors are tolerated: a failed symbol simply doesn't count.
		if err == nil && score > 0 {
			found++
		}
		_ = alert // alert is already persisted by scanTicker; unused here

		// Publish progress every 50 tickers to keep lock traffic low.
		if (i+1)%50 == 0 {
			d.mu.Lock()
			d.progress = i + 1
			d.found = found
			d.mu.Unlock()
			log.Printf("discovery: %d/%d (opportunités: %d)", i+1, len(tickers), found)
		}

		time.Sleep(120 * time.Millisecond) // ~8 req/s against Yahoo Finance
	}

	// Final counters, so Status() shows 100% once done.
	d.mu.Lock()
	d.progress = len(tickers)
	d.found = found
	d.mu.Unlock()

	log.Printf("discovery: terminé — %d opportunités sur %d tickers", found, len(tickers))
}
|
||||
|
||||
// scanTicker fetches ~60 daily bars for sym, computes the technical
// indicators, scores the setup, and upserts the result into the signals
// table with source='discovery' and on_etoro=1. A score of 0 is not
// persisted; (0, "", nil) means "nothing interesting here".
func (d *DiscoveryScanner) scanTicker(sym string) (score int, alert string, err error) {
	bars, err := d.yahoo.History(sym, 60)
	if err != nil || len(bars) < 20 {
		// Not enough history for the 20-day indicators.
		return 0, "", err
	}

	closes := make([]float64, len(bars))
	volumes := make([]int64, len(bars))
	for i, b := range bars {
		closes[i] = b.Close
		volumes[i] = b.Volume
	}

	last := bars[len(bars)-1]
	prevClose := bars[len(bars)-2].Close // safe: len(bars) >= 20
	changePct := 0.0
	if prevClose > 0 {
		changePct = (last.Close - prevClose) / prevClose * 100
	}

	rsi := indicators.RSI(closes, 14)
	macdRes := indicators.MACD(closes)
	sma20 := indicators.SMA(closes, 20)
	sma50 := indicators.SMA(closes, 50)
	avgVol := indicators.AvgVolume(volumes, 20)

	// "52-week" range approximated from the bars we just fetched. With only
	// ~60 bars this is really a ~3-month range — a deliberate shortcut to
	// keep discovery cheap across the whole universe.
	week52High, week52Low := highLow(closes)
	pctFromHigh := 0.0
	if week52High > 0 {
		pctFromHigh = (last.Close - week52High) / week52High * 100 // negative below the high
	}

	// Simplified score: no insider/news lookups during discovery (too slow
	// over thousands of tickers).
	score = computeScore(scoreInput{
		rsi:         rsi,
		macd:        macdRes,
		volume:      last.Volume,
		avgVolume:   avgVol,
		pctFromHigh: pctFromHigh,
	})

	if score == 0 {
		return 0, "", nil
	}

	alert = detectAlert(rsi, macdRes, last.Volume, avgVol, 0, pctFromHigh)

	// Upsert keyed on ticker; on_etoro is hard-coded to 1 and source to
	// 'discovery' so the /discover endpoint can filter these rows.
	_, err = d.db.Exec(`
		INSERT INTO signals
		  (ticker, price, change_pct, rsi14, macd, macd_signal, macd_hist,
		   sma20, sma50, volume, avg_volume20,
		   week52_high, week52_low, pct_from_high,
		   score, on_etoro, alert, source, computed_at)
		VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,1,?,?,CURRENT_TIMESTAMP)
		ON CONFLICT(ticker) DO UPDATE SET
		  price = excluded.price,
		  change_pct = excluded.change_pct,
		  rsi14 = excluded.rsi14,
		  macd = excluded.macd,
		  macd_signal = excluded.macd_signal,
		  macd_hist = excluded.macd_hist,
		  sma20 = excluded.sma20,
		  sma50 = excluded.sma50,
		  volume = excluded.volume,
		  avg_volume20 = excluded.avg_volume20,
		  week52_high = excluded.week52_high,
		  week52_low = excluded.week52_low,
		  pct_from_high = excluded.pct_from_high,
		  score = excluded.score,
		  on_etoro = 1,
		  alert = excluded.alert,
		  source = 'discovery',
		  computed_at = CURRENT_TIMESTAMP
	`, sym, last.Close, changePct, rsi,
		macdRes.MACD, macdRes.Signal, macdRes.Histogram,
		sma20, sma50, last.Volume, avgVol,
		week52High, week52Low, pctFromHigh,
		score, alert, "discovery")

	return score, alert, err
}
|
||||
|
||||
func (d *DiscoveryScanner) etoroTickers() ([]string, error) {
|
||||
rows, err := d.db.Query(`SELECT ticker FROM instruments ORDER BY ticker`)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
var tickers []string
|
||||
for rows.Next() {
|
||||
var t string
|
||||
if err := rows.Scan(&t); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
tickers = append(tickers, t)
|
||||
}
|
||||
return tickers, nil
|
||||
}
|
||||
|
||||
// highLow returns the maximum and minimum values in closes.
// An empty slice yields (0, 0).
func highLow(closes []float64) (high, low float64) {
	if len(closes) == 0 {
		return 0, 0
	}
	high = closes[0]
	low = closes[0]
	for i := 1; i < len(closes); i++ {
		v := closes[i]
		if v > high {
			high = v
		} else if v < low {
			// A value above the running high can never also be a new low,
			// so else-if is safe here.
			low = v
		}
	}
	return high, low
}
|
||||
@@ -0,0 +1,439 @@
|
||||
package scanner
|
||||
|
||||
import (
|
||||
"log"
|
||||
"time"
|
||||
|
||||
"git.rouggy.com/rouggy/stockradar/internal/db"
|
||||
"git.rouggy.com/rouggy/stockradar/internal/indicators"
|
||||
"git.rouggy.com/rouggy/stockradar/internal/yahoo"
|
||||
)
|
||||
|
||||
// Signal is the JSON shape of one computed signal row, as served by the
// /signals endpoints.
type Signal struct {
	Ticker          string  `json:"ticker"`
	Name            string  `json:"name"`
	Price           float64 `json:"price"`
	ChangePct       float64 `json:"change_pct"`
	RSI14           float64 `json:"rsi14"`
	MACD            float64 `json:"macd"`
	MACDSignal      float64 `json:"macd_signal"`
	MACDHist        float64 `json:"macd_hist"`
	SMA20           float64 `json:"sma20"`
	SMA50           float64 `json:"sma50"`
	Volume          int64   `json:"volume"`
	AvgVolume20     int64   `json:"avg_volume20"`
	MarketCap       int64   `json:"market_cap"`
	ShortRatio      float64 `json:"short_ratio"`
	Week52High      float64 `json:"week52_high"`
	Week52Low       float64 `json:"week52_low"`
	PctFromHigh     float64 `json:"pct_from_high"`     // negative = % below the 52-week high
	InsiderValue30d float64 `json:"insider_value_30d"` // total $ of insider buys over 30 days
	Score           int     `json:"score"`
	OnEtoro         bool    `json:"on_etoro"`
	Alert           string  `json:"alert"`
	ComputedAt      string  `json:"computed_at"`
}
|
||||
|
||||
// Scanner periodically recomputes technical signals for every ticker on
// the active watchlist.
type Scanner struct {
	db     *db.DB
	yahoo  *yahoo.Client
	ticker *time.Ticker  // fires every 30 minutes once Start is called
	done   chan struct{} // closed by Stop to terminate the background loop
}
|
||||
|
||||
func New(database *db.DB) *Scanner {
|
||||
return &Scanner{
|
||||
db: database,
|
||||
yahoo: yahoo.New(),
|
||||
done: make(chan struct{}),
|
||||
}
|
||||
}
|
||||
|
||||
// Start launches the background scan loop: one immediate scan at boot,
// then one every 30 minutes until Stop is called.
func (s *Scanner) Start() {
	s.ticker = time.NewTicker(30 * time.Minute)
	go func() {
		// Run once right away so fresh data is available at startup.
		if err := s.Scan(); err != nil {
			log.Printf("scanner: initial scan: %v", err)
		}
		for {
			select {
			case <-s.ticker.C:
				if err := s.Scan(); err != nil {
					log.Printf("scanner: scan: %v", err)
				}
			case <-s.done:
				// Stop was called; end the goroutine.
				return
			}
		}
	}()
}
|
||||
|
||||
// Stop halts the periodic loop and releases the ticker.
// Contract: call at most once, and only after Start — a second call would
// panic on the double close of done.
func (s *Scanner) Stop() {
	if s.ticker != nil {
		s.ticker.Stop()
	}
	close(s.done)
}
|
||||
|
||||
// Scan recomputes signals for every active watchlist ticker, throttled to
// respect Yahoo's rate limits. Per-ticker failures are logged and skipped;
// only a failure to read the watchlist itself is returned as an error.
func (s *Scanner) Scan() error {
	tickers, err := s.watchlistTickers()
	if err != nil {
		return err
	}
	if len(tickers) == 0 {
		return nil
	}

	log.Printf("scanner: scanning %d tickers…", len(tickers))
	ok := 0
	for _, sym := range tickers {
		if err := s.scanTicker(sym); err != nil {
			log.Printf("scanner: %s: %v", sym, err)
			continue
		}
		ok++
		time.Sleep(400 * time.Millisecond) // Yahoo rate limit
	}
	log.Printf("scanner: done — %d/%d ok", ok, len(tickers))
	return nil
}
|
||||
|
||||
// scanTicker refreshes the full signal row for one watchlist symbol:
// fetches ~100 daily bars, persists them, computes technical indicators,
// enriches with market cap, insider and news data, scores the setup, and
// upserts everything into the signals table (one row per ticker).
func (s *Scanner) scanTicker(sym string) error {
	bars, err := s.yahoo.History(sym, 100)
	if err != nil {
		return err
	}
	if len(bars) < 30 {
		// Not enough history for the 20/50-day indicators; skip silently.
		return nil
	}

	s.storePrices(sym, bars)

	closes := make([]float64, len(bars))
	volumes := make([]int64, len(bars))
	for i, b := range bars {
		closes[i] = b.Close
		volumes[i] = b.Volume
	}

	last := bars[len(bars)-1]
	prevClose := bars[len(bars)-2].Close // safe: len(bars) >= 30

	changePct := 0.0
	if prevClose > 0 {
		changePct = (last.Close - prevClose) / prevClose * 100
	}

	rsi := indicators.RSI(closes, 14)
	macdRes := indicators.MACD(closes)
	sma20 := indicators.SMA(closes, 20)
	sma50 := indicators.SMA(closes, 50)
	avgVol := indicators.AvgVolume(volumes, 20)

	// Market cap / short ratio are nice-to-have: a lookup failure is
	// tolerated and simply leaves the zero values in place.
	var marketCap int64
	var shortRatio float64
	if info, err := s.yahoo.GetMarketCap(sym); err == nil {
		marketCap = info.MarketCap
		shortRatio = info.ShortRatio
	}
	time.Sleep(150 * time.Millisecond) // extra spacing for the second Yahoo call

	// 52-week range from the locally stored price history.
	week52High, week52Low := s.week52Range(sym)
	pctFromHigh := 0.0
	if week52High > 0 {
		pctFromHigh = (last.Close - week52High) / week52High * 100 // negative below the high
	}

	// Insider purchases over the last 30 days, weighted by dollar VALUE.
	insiderValue30d := s.insiderBuyValue30d(sym)
	insiderDays := s.lastInsiderBuyDays(sym)

	// Is the symbol tradable on eToro?
	onEtoro := s.isOnEtoro(sym)

	// Composite 0-100 score.
	score := computeScore(scoreInput{
		rsi:             rsi,
		macd:            macdRes,
		volume:          last.Volume,
		avgVolume:       avgVol,
		marketCap:       marketCap,
		shortRatio:      shortRatio,
		insiderDays:     insiderDays,
		insiderValue30d: insiderValue30d,
		newsDays:        s.lastPositiveNewsDays(sym),
		price:           last.Close,
		sma20:           sma20,
		sma50:           sma50,
		pctFromHigh:     pctFromHigh,
	})

	alert := detectAlert(rsi, macdRes, last.Volume, avgVol, insiderValue30d, pctFromHigh)

	// Upsert keyed on ticker: the row is refreshed in place on every scan.
	_, err = s.db.Exec(`
		INSERT INTO signals
		  (ticker, price, change_pct, rsi14, macd, macd_signal, macd_hist,
		   sma20, sma50, volume, avg_volume20, market_cap, short_ratio,
		   week52_high, week52_low, pct_from_high, insider_value_30d,
		   score, on_etoro, alert, computed_at)
		VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,CURRENT_TIMESTAMP)
		ON CONFLICT(ticker) DO UPDATE SET
		  price = excluded.price,
		  change_pct = excluded.change_pct,
		  rsi14 = excluded.rsi14,
		  macd = excluded.macd,
		  macd_signal = excluded.macd_signal,
		  macd_hist = excluded.macd_hist,
		  sma20 = excluded.sma20,
		  sma50 = excluded.sma50,
		  volume = excluded.volume,
		  avg_volume20 = excluded.avg_volume20,
		  market_cap = excluded.market_cap,
		  short_ratio = excluded.short_ratio,
		  week52_high = excluded.week52_high,
		  week52_low = excluded.week52_low,
		  pct_from_high = excluded.pct_from_high,
		  insider_value_30d = excluded.insider_value_30d,
		  score = excluded.score,
		  on_etoro = excluded.on_etoro,
		  alert = excluded.alert,
		  computed_at = CURRENT_TIMESTAMP
	`, sym, last.Close, changePct, rsi,
		macdRes.MACD, macdRes.Signal, macdRes.Histogram,
		sma20, sma50, last.Volume, avgVol,
		marketCap, shortRatio,
		week52High, week52Low, pctFromHigh, insiderValue30d,
		score, boolToInt(onEtoro), alert)

	return err
}
|
||||
|
||||
// ---- Scoring ----
|
||||
|
||||
// scoreInput bundles everything computeScore needs for one ticker.
// Sentinel conventions: rsi <= 0 and insiderDays/newsDays == -1 mean
// "no data".
type scoreInput struct {
	rsi             float64
	macd            indicators.MACDResult
	volume          int64
	avgVolume       int64
	marketCap       int64
	shortRatio      float64
	insiderDays     int     // days since the last insider buy (-1 = none)
	insiderValue30d float64 // total $ of insider purchases over 30 days
	newsDays        int     // days since the last positive news item (-1 = none)
	price           float64
	sma20           float64
	sma50           float64
	pctFromHigh     float64 // % below the 52-week high (negative)
}
|
||||
|
||||
// computeScore turns the collected indicators into a composite opportunity
// score, clamped to 0-100. Each factor contributes an independent bucket of
// points; the single strongest factor is insider buying (up to 30 + 5 pts).
// Note: price, sma20, sma50 and shortRatio are carried in scoreInput but
// not used by the current formula.
func computeScore(in scoreInput) int {
	score := 0

	// RSI oversold recovery (0-20 pts); rsi <= 0 means "no data".
	if in.rsi > 0 {
		if in.rsi >= 25 && in.rsi < 30 {
			score += 20 // deep oversold
		} else if in.rsi >= 30 && in.rsi < 40 {
			score += 15 // recent exit from oversold
		} else if in.rsi >= 40 && in.rsi < 50 {
			score += 8 // neutral-bullish momentum
		}
	}

	// MACD signal (0-15 pts).
	if in.macd.Histogram > 0 {
		if in.macd.MACD < 0 {
			score += 15 // early bullish cross (the best setup)
		} else {
			score += 8 // confirmed bullish momentum
		}
	}

	// Volume spike vs the 20-day average (0-15 pts).
	if in.avgVolume > 0 {
		ratio := float64(in.volume) / float64(in.avgVolume)
		if ratio >= 3.0 {
			score += 15
		} else if ratio >= 2.0 {
			score += 10
		} else if ratio >= 1.5 {
			score += 5
		}
	}

	// Insider buying weighted by dollar VALUE (0-30 pts) — the strongest signal.
	if in.insiderValue30d > 0 {
		switch {
		case in.insiderValue30d >= 100_000_000: // >= $100M: exceptional (e.g. TTD CEO)
			score += 30
		case in.insiderValue30d >= 10_000_000: // >= $10M
			score += 22
		case in.insiderValue30d >= 1_000_000: // >= $1M
			score += 15
		case in.insiderValue30d >= 100_000: // >= $100K
			score += 8
		default:
			score += 3
		}
		// Recency bonus when the purchase is less than a week old.
		if in.insiderDays >= 0 && in.insiderDays <= 7 {
			score += 5
		}
	}

	// Recent positive news (0-10 pts); newsDays < 0 means none known.
	if in.newsDays >= 0 {
		if in.newsDays <= 3 {
			score += 10
		} else if in.newsDays <= 7 {
			score += 5
		}
	}

	// Position within the 52-week range (0-10 pts): a deeply depressed name
	// has rebound potential.
	if in.pctFromHigh < -40 {
		score += 10 // e.g. TTD at -54%
	} else if in.pctFromHigh < -25 {
		score += 6
	} else if in.pctFromHigh < -15 {
		score += 3
	}

	// Small-cap bonus (+5): moves harder.
	if in.marketCap > 0 && in.marketCap < 2_000_000_000 {
		score += 5
	}

	// Clamp: the buckets can theoretically sum past 100.
	if score > 100 {
		score = 100
	}
	return score
}
|
||||
|
||||
// ---- Helpers ----
|
||||
|
||||
func detectAlert(rsi float64, m indicators.MACDResult, vol, avgVol int64, insiderValue30d, pctFromHigh float64) string {
|
||||
// Priorité 1 : mega insider buy (signal le plus fort)
|
||||
if insiderValue30d >= 1_000_000 {
|
||||
return "mega_insider_buy"
|
||||
}
|
||||
// Priorité 2 : RSI oversold
|
||||
if rsi > 0 && rsi < 30 {
|
||||
return "oversold"
|
||||
}
|
||||
// Priorité 3 : MACD cross haussier
|
||||
if m.Histogram > 0 && m.MACD < 0 {
|
||||
return "macd_cross_up"
|
||||
}
|
||||
// Priorité 4 : volume spike
|
||||
if avgVol > 0 && float64(vol)/float64(avgVol) >= 3.0 {
|
||||
return "volume_spike"
|
||||
}
|
||||
// Priorité 5 : rebond depuis creux 52 semaines + RSI en remontée
|
||||
if pctFromHigh < -40 && rsi > 30 && rsi < 50 {
|
||||
return "deep_value_reversal"
|
||||
}
|
||||
if rsi > 70 {
|
||||
return "overbought"
|
||||
}
|
||||
if m.Histogram < 0 && m.MACD > 0 {
|
||||
return "macd_cross_down"
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// week52Range returns the highest high and lowest low stored for ticker
// over the past year. Best effort: when no rows match, MAX/MIN come back
// NULL, Scan fails, and the error is deliberately ignored — both results
// then stay at their zero value, which callers treat as "unknown".
func (s *Scanner) week52Range(ticker string) (high, low float64) {
	cutoff := time.Now().AddDate(-1, 0, 0).Format("2006-01-02")
	row := s.db.QueryRow(`
		SELECT MAX(high), MIN(low) FROM prices
		WHERE ticker = ? AND date >= ?
	`, ticker, cutoff)
	row.Scan(&high, &low) // best effort; zeros mean "no data"
	return
}
|
||||
|
||||
// insiderBuyValue30d sums the dollar value of insider purchases
// (transaction_code 'P') for ticker over the last 30 days.
// COALESCE guarantees a 0 instead of NULL when there are no trades, so the
// ignored Scan error can only leave the zero value in place anyway.
func (s *Scanner) insiderBuyValue30d(ticker string) float64 {
	cutoff := time.Now().AddDate(0, 0, -30).Format("2006-01-02")
	var total float64
	s.db.QueryRow(`
		SELECT COALESCE(SUM(total_value), 0) FROM insider_trades
		WHERE ticker = ? AND transaction_code = 'P' AND transaction_date >= ?
	`, ticker, cutoff).Scan(&total)
	return total
}
|
||||
|
||||
func (s *Scanner) isOnEtoro(ticker string) bool {
|
||||
var count int
|
||||
s.db.QueryRow(`SELECT COUNT(*) FROM instruments WHERE ticker = ?`, ticker).Scan(&count)
|
||||
return count > 0
|
||||
}
|
||||
|
||||
// lastInsiderBuyDays returns the number of days since the most recent
// insider purchase (transaction_code 'P') for ticker, or -1 when there is
// none or the stored date cannot be parsed.
func (s *Scanner) lastInsiderBuyDays(ticker string) int {
	var dateStr string
	err := s.db.QueryRow(`
		SELECT transaction_date FROM insider_trades
		WHERE ticker = ? AND transaction_code = 'P'
		ORDER BY transaction_date DESC LIMIT 1
	`, ticker).Scan(&dateStr)
	if err != nil || dateStr == "" {
		return -1
	}
	// assumes transaction_date is stored as YYYY-MM-DD — TODO confirm the
	// EDGAR poller writes that exact layout; anything else returns -1 here.
	t, err := time.Parse("2006-01-02", dateStr)
	if err != nil {
		return -1
	}
	return int(time.Since(t).Hours() / 24)
}
|
||||
|
||||
// lastPositiveNewsDays returns the number of days since the most recent
// news item tagged 'positive' for ticker, or -1 when there is none or the
// stored timestamp cannot be parsed.
func (s *Scanner) lastPositiveNewsDays(ticker string) int {
	var dateStr string
	err := s.db.QueryRow(`
		SELECT published_at FROM news
		WHERE ticker = ? AND sentiment = 'positive'
		ORDER BY published_at DESC LIMIT 1
	`, ticker).Scan(&dateStr)
	if err != nil || dateStr == "" {
		return -1
	}
	// assumes published_at is stored as RFC3339 — TODO confirm against the
	// news poller; SQLite's default DATETIME format would fail this parse
	// and silently yield -1.
	t, err := time.Parse(time.RFC3339, dateStr)
	if err != nil {
		return -1
	}
	return int(time.Since(t).Hours() / 24)
}
|
||||
|
||||
func (s *Scanner) storePrices(ticker string, bars []yahoo.Bar) {
|
||||
for _, b := range bars {
|
||||
s.db.Exec(`
|
||||
INSERT OR IGNORE INTO prices (ticker, date, open, high, low, close, volume)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?)
|
||||
`, ticker, b.Date.Format("2006-01-02"), b.Open, b.High, b.Low, b.Close, b.Volume)
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Scanner) watchlistTickers() ([]string, error) {
|
||||
rows, err := s.db.Query(`SELECT ticker FROM watchlist WHERE active=1`)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
var tickers []string
|
||||
for rows.Next() {
|
||||
var t string
|
||||
if err := rows.Scan(&t); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
tickers = append(tickers, t)
|
||||
}
|
||||
return tickers, nil
|
||||
}
|
||||
|
||||
// boolToInt maps true to 1 and false to 0, for storing booleans in SQLite
// integer columns.
func boolToInt(b bool) int {
	v := 0
	if b {
		v = 1
	}
	return v
}
|
||||
@@ -0,0 +1,141 @@
|
||||
package server
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
|
||||
"git.rouggy.com/rouggy/stockradar/internal/scanner"
|
||||
)
|
||||
|
||||
// ---- eToro ----
|
||||
|
||||
// handleSyncEtoro kicks off an eToro universe sync in the background and
// returns immediately with a "syncing" acknowledgment.
// NOTE(review): the goroutine is untracked — repeated POSTs can start
// overlapping syncs unless Sync() guards against that internally; confirm.
func (s *Server) handleSyncEtoro(w http.ResponseWriter, r *http.Request) {
	go func() { s.etoroPoller.Sync() }()
	w.Header().Set("Content-Type", "application/json")
	w.Write([]byte(`{"status":"syncing"}`))
}
|
||||
|
||||
// handleEtoroStatus returns the eToro poller's status snapshot as JSON.
func (s *Server) handleEtoroStatus(w http.ResponseWriter, r *http.Request) {
	status := s.etoroPoller.Status()
	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(status)
}
|
||||
|
||||
// ---- Discovery ----
|
||||
|
||||
// handleRunDiscovery starts a discovery scan. Run() itself rejects
// concurrent scans, so the response tells the client which case occurred.
func (s *Server) handleRunDiscovery(w http.ResponseWriter, r *http.Request) {
	started := s.discovery.Run()
	w.Header().Set("Content-Type", "application/json")
	if started {
		w.Write([]byte(`{"status":"started"}`))
	} else {
		w.Write([]byte(`{"status":"already_running"}`))
	}
}
|
||||
|
||||
// handleDiscoveryStatus returns the discovery scanner's progress as JSON.
func (s *Server) handleDiscoveryStatus(w http.ResponseWriter, r *http.Request) {
	status := s.discovery.Status()
	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(status)
}
|
||||
|
||||
// handleGetDiscovery returns discovery-sourced signals at or above a
// minimum score (query param min_score, default 30), best score first,
// capped at 200 rows.
func (s *Server) handleGetDiscovery(w http.ResponseWriter, r *http.Request) {
	minScore := r.URL.Query().Get("min_score")
	if minScore == "" {
		minScore = "30"
	}
	// NOTE(review): minScore is bound as a raw string; the comparison works
	// because the score column's INTEGER affinity coerces it in SQLite, but
	// a non-numeric value silently matches nothing — consider validating
	// with strconv before binding.

	rows, err := s.db.Query(`
		SELECT sig.ticker, COALESCE(inst.name, sig.ticker),
		       sig.price, sig.change_pct, sig.rsi14,
		       sig.macd_hist, sig.volume, sig.avg_volume20,
		       COALESCE(sig.week52_high, 0), COALESCE(sig.pct_from_high, 0),
		       COALESCE(sig.market_cap, 0),
		       COALESCE(sig.score, 0), COALESCE(sig.alert,''), sig.computed_at
		FROM signals sig
		LEFT JOIN instruments inst ON inst.ticker = sig.ticker
		WHERE sig.source = 'discovery'
		  AND sig.on_etoro = 1
		  AND sig.score >= ?
		ORDER BY sig.score DESC
		LIMIT 200
	`, minScore)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	defer rows.Close()

	// Handler-local response shape: a trimmed view of the signals row.
	type discoveryRow struct {
		Ticker      string  `json:"ticker"`
		Name        string  `json:"name"`
		Price       float64 `json:"price"`
		ChangePct   float64 `json:"change_pct"`
		RSI14       float64 `json:"rsi14"`
		MACDHist    float64 `json:"macd_hist"`
		Volume      int64   `json:"volume"`
		AvgVolume20 int64   `json:"avg_volume20"`
		Week52High  float64 `json:"week52_high"`
		PctFromHigh float64 `json:"pct_from_high"`
		MarketCap   int64   `json:"market_cap"`
		Score       int     `json:"score"`
		Alert       string  `json:"alert"`
		ComputedAt  string  `json:"computed_at"`
	}

	// Non-nil empty slice so the endpoint encodes [] rather than null.
	results := []discoveryRow{}
	for rows.Next() {
		var row discoveryRow
		var vol sql.NullInt64
		var avg sql.NullInt64
		if err := rows.Scan(
			&row.Ticker, &row.Name,
			&row.Price, &row.ChangePct, &row.RSI14,
			&row.MACDHist, &vol, &avg,
			&row.Week52High, &row.PctFromHigh,
			&row.MarketCap,
			&row.Score, &row.Alert, &row.ComputedAt,
		); err != nil {
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}
		// The volume columns may be NULL; keep 0 in that case.
		if vol.Valid {
			row.Volume = vol.Int64
		}
		if avg.Valid {
			row.AvgVolume20 = avg.Int64
		}
		results = append(results, row)
	}
	// NOTE(review): rows.Err() is not checked after the loop — a mid-scan
	// failure would be served as a silently truncated result set.

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(results)
}
|
||||
|
||||
// handleEtoroStats keeps backward compatibility with the old endpoint by
// delegating to handleEtoroStatus.
func (s *Server) handleEtoroStats(w http.ResponseWriter, r *http.Request) {
	s.handleEtoroStatus(w, r)
}
|
||||
|
||||
// Scan watchlist signal - déjà dans handlers_scanner.go, on ajoute juste
|
||||
// un champ source à la query existante
|
||||
|
||||
// signalFromRow scans one row produced by the full signals SELECT (same
// column order as handleGetSignals) into a scanner.Signal. The on_etoro
// flag comes back as the raw 0/1 int for the caller to convert.
// The anonymous interface lets it accept both *sql.Row and *sql.Rows.
func signalFromRow(rows interface {
	Scan(...any) error
}) (scanner.Signal, int, error) {
	var sig scanner.Signal
	var onEtoro int
	err := rows.Scan(
		&sig.Ticker, &sig.Name,
		&sig.Price, &sig.ChangePct,
		&sig.RSI14, &sig.MACD, &sig.MACDSignal, &sig.MACDHist,
		&sig.SMA20, &sig.SMA50, &sig.Volume, &sig.AvgVolume20,
		&sig.MarketCap, &sig.ShortRatio,
		&sig.Week52High, &sig.Week52Low,
		&sig.PctFromHigh, &sig.InsiderValue30d,
		&sig.Score, &onEtoro,
		&sig.Alert, &sig.ComputedAt,
	)
	return sig, onEtoro, err
}
|
||||
@@ -0,0 +1,66 @@
|
||||
package server
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
)
|
||||
|
||||
// insiderTradeRow is the JSON shape of one SEC EDGAR insider transaction
// as served by /insider-trades.
type insiderTradeRow struct {
	ID              int     `json:"id"`
	Ticker          string  `json:"ticker"`
	InsiderName     string  `json:"insider_name"`
	InsiderTitle    string  `json:"insider_title"`
	TransactionCode string  `json:"transaction_code"` // e.g. 'P' = open-market purchase
	Shares          float64 `json:"shares"`
	Price           float64 `json:"price"`
	TotalValue      float64 `json:"total_value"`
	TransactionDate string  `json:"transaction_date"`
	FilingURL       string  `json:"filing_url"`
}
|
||||
|
||||
// handleGetInsiderTrades lists insider trades, newest first. With a
// ?ticker= filter it returns up to 100 rows for that symbol; without one,
// the latest 200 across all symbols.
func (s *Server) handleGetInsiderTrades(w http.ResponseWriter, r *http.Request) {
	ticker := r.URL.Query().Get("ticker")

	// COALESCE every nullable column so Scan can target plain Go types.
	base := `
		SELECT id, ticker, COALESCE(insider_name,''), COALESCE(insider_title,''),
		       COALESCE(transaction_code,''), COALESCE(shares,0), COALESCE(price,0),
		       COALESCE(total_value,0), COALESCE(transaction_date,''), COALESCE(filing_url,'')
		FROM insider_trades`

	var rows *sql.Rows
	var err error
	if ticker != "" {
		rows, err = s.db.Query(base+` WHERE ticker = ? ORDER BY transaction_date DESC LIMIT 100`, ticker)
	} else {
		rows, err = s.db.Query(base + ` ORDER BY transaction_date DESC LIMIT 200`)
	}
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	defer rows.Close()

	// Non-nil empty slice so the endpoint encodes [] rather than null.
	trades := []insiderTradeRow{}
	for rows.Next() {
		var t insiderTradeRow
		if err := rows.Scan(
			&t.ID, &t.Ticker, &t.InsiderName, &t.InsiderTitle,
			&t.TransactionCode, &t.Shares, &t.Price, &t.TotalValue,
			&t.TransactionDate, &t.FilingURL,
		); err != nil {
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}
		trades = append(trades, t)
	}
	// NOTE(review): rows.Err() is not checked after the loop — a mid-scan
	// failure would be served as a silently truncated list.

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(trades)
}
|
||||
|
||||
// handleSyncInsider kicks off a SEC EDGAR sync in the background and
// returns immediately. NOTE(review): the goroutine is untracked; repeated
// POSTs can start overlapping syncs unless Sync() guards internally.
func (s *Server) handleSyncInsider(w http.ResponseWriter, r *http.Request) {
	go func() { s.edgarPoller.Sync() }()
	w.Header().Set("Content-Type", "application/json")
	w.Write([]byte(`{"status":"syncing"}`))
}
|
||||
@@ -6,6 +6,17 @@ import (
|
||||
"net/http"
|
||||
)
|
||||
|
||||
// handleNewsSync kicks off a Finnhub news sync in the background and
// returns immediately with a "syncing" acknowledgment.
func (s *Server) handleNewsSync(w http.ResponseWriter, r *http.Request) {
	go func() {
		if err := s.poller.Sync(); err != nil {
			// Errors are already logged inside Sync(); deliberately dropped here.
			_ = err
		}
	}()
	w.Header().Set("Content-Type", "application/json")
	w.Write([]byte(`{"status":"syncing"}`))
}
|
||||
|
||||
type newsItem struct {
|
||||
ID int `json:"id"`
|
||||
Ticker string `json:"ticker"`
|
||||
|
||||
@@ -0,0 +1,115 @@
|
||||
package server
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
|
||||
"git.rouggy.com/rouggy/stockradar/internal/scanner"
|
||||
)
|
||||
|
||||
// handleGetSignals returns every computed signal row (optionally only
// eToro-tradable ones with ?etoro=1), best score first, alerts surfaced
// before non-alerts at equal score.
func (s *Server) handleGetSignals(w http.ResponseWriter, r *http.Request) {
	onlyEtoro := r.URL.Query().Get("etoro") == "1"

	// COALESCE the columns added by later migrations so rows written before
	// those migrations still scan into non-pointer Go types.
	query := `
		SELECT sig.ticker, COALESCE(inst.name, sig.ticker),
		       sig.price, sig.change_pct, sig.rsi14,
		       sig.macd, sig.macd_signal, sig.macd_hist,
		       sig.sma20, sig.sma50, sig.volume, sig.avg_volume20,
		       COALESCE(sig.market_cap, 0), COALESCE(sig.short_ratio, 0),
		       COALESCE(sig.week52_high, 0), COALESCE(sig.week52_low, 0),
		       COALESCE(sig.pct_from_high, 0), COALESCE(sig.insider_value_30d, 0),
		       COALESCE(sig.score, 0), COALESCE(sig.on_etoro, 0),
		       COALESCE(sig.alert,''), sig.computed_at
		FROM signals sig
		LEFT JOIN instruments inst ON inst.ticker = sig.ticker`

	if onlyEtoro {
		query += ` WHERE sig.on_etoro = 1`
	}
	query += ` ORDER BY sig.score DESC, CASE WHEN sig.alert != '' THEN 0 ELSE 1 END`

	rows, err := s.db.Query(query)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	defer rows.Close()

	// Non-nil empty slice so the endpoint encodes [] rather than null.
	signals := []scanner.Signal{}
	for rows.Next() {
		var sig scanner.Signal
		var onEtoro int
		if err := rows.Scan(
			&sig.Ticker, &sig.Name,
			&sig.Price, &sig.ChangePct,
			&sig.RSI14, &sig.MACD, &sig.MACDSignal, &sig.MACDHist,
			&sig.SMA20, &sig.SMA50, &sig.Volume, &sig.AvgVolume20,
			&sig.MarketCap, &sig.ShortRatio,
			&sig.Week52High, &sig.Week52Low,
			&sig.PctFromHigh, &sig.InsiderValue30d,
			&sig.Score, &onEtoro,
			&sig.Alert, &sig.ComputedAt,
		); err != nil {
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}
		sig.OnEtoro = onEtoro == 1
		signals = append(signals, sig)
	}
	// NOTE(review): rows.Err() is not checked after the loop — a mid-scan
	// failure would be served as a silently truncated result set.

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(signals)
}
|
||||
|
||||
// handleTriggerScan fires a watchlist scan in the background and returns
// immediately. NOTE(review): the goroutine is untracked and the Scan error
// is dropped; repeated POSTs can run overlapping scans — confirm Scan is
// safe to run concurrently.
func (s *Server) handleTriggerScan(w http.ResponseWriter, r *http.Request) {
	go func() { s.scanner.Scan() }()
	w.Header().Set("Content-Type", "application/json")
	w.Write([]byte(`{"status":"scanning"}`))
}
|
||||
|
||||
// handleGetPrices returns up to 90 stored daily bars for the required
// ?ticker= symbol, oldest first (note: with LIMIT on ascending dates this
// is the OLDEST 90 bars on record, not the most recent 90).
func (s *Server) handleGetPrices(w http.ResponseWriter, r *http.Request) {
	ticker := r.URL.Query().Get("ticker")
	if ticker == "" {
		http.Error(w, "ticker required", http.StatusBadRequest)
		return
	}

	rows, err := s.db.Query(`
		SELECT date, open, high, low, close, volume
		FROM prices WHERE ticker = ?
		ORDER BY date ASC LIMIT 90
	`, ticker)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	defer rows.Close()

	// Handler-local response shape for one OHLCV bar.
	type bar struct {
		Date   string  `json:"date"`
		Open   float64 `json:"open"`
		High   float64 `json:"high"`
		Low    float64 `json:"low"`
		Close  float64 `json:"close"`
		Volume int64   `json:"volume"`
	}

	// Non-nil empty slice so the endpoint encodes [] rather than null.
	bars := []bar{}
	for rows.Next() {
		var b bar
		var vol sql.NullInt64
		if err := rows.Scan(&b.Date, &b.Open, &b.High, &b.Low, &b.Close, &vol); err != nil {
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}
		// volume may be NULL; keep 0 in that case.
		if vol.Valid {
			b.Volume = vol.Int64
		}
		bars = append(bars, b)
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(bars)
}
|
||||
|
||||
@@ -6,15 +6,24 @@ import (
|
||||
"net/http"
|
||||
|
||||
"git.rouggy.com/rouggy/stockradar/internal/db"
|
||||
"git.rouggy.com/rouggy/stockradar/internal/edgar"
|
||||
"git.rouggy.com/rouggy/stockradar/internal/etoro"
|
||||
"git.rouggy.com/rouggy/stockradar/internal/finnhub"
|
||||
"git.rouggy.com/rouggy/stockradar/internal/scanner"
|
||||
"git.rouggy.com/rouggy/stockradar/internal/settings"
|
||||
"github.com/gorilla/mux"
|
||||
)
|
||||
|
||||
type Server struct {
|
||||
db *db.DB
|
||||
port string
|
||||
router *mux.Router
|
||||
settings *settings.Settings
|
||||
db *db.DB
|
||||
port string
|
||||
router *mux.Router
|
||||
settings *settings.Settings
|
||||
poller *finnhub.Poller
|
||||
scanner *scanner.Scanner
|
||||
discovery *scanner.DiscoveryScanner
|
||||
edgarPoller *edgar.Poller
|
||||
etoroPoller *etoro.Poller
|
||||
}
|
||||
|
||||
func New(database *db.DB, port string) (*Server, error) {
|
||||
@@ -29,6 +38,23 @@ func New(database *db.DB, port string) (*Server, error) {
|
||||
router: mux.NewRouter(),
|
||||
settings: svc,
|
||||
}
|
||||
|
||||
s.poller = finnhub.NewPoller(database, func() (string, error) {
|
||||
return svc.Get("finnhub_api_key")
|
||||
})
|
||||
s.poller.Start()
|
||||
|
||||
s.scanner = scanner.New(database)
|
||||
s.scanner.Start()
|
||||
|
||||
s.discovery = scanner.NewDiscovery(database)
|
||||
|
||||
s.edgarPoller = edgar.NewPoller(database)
|
||||
s.edgarPoller.Start()
|
||||
|
||||
s.etoroPoller = etoro.NewPoller(database)
|
||||
s.etoroPoller.Start()
|
||||
|
||||
s.setupRoutes()
|
||||
return s, nil
|
||||
}
|
||||
@@ -51,6 +77,25 @@ func (s *Server) setupRoutes() {
|
||||
|
||||
// News
|
||||
api.HandleFunc("/news", s.handleGetNews).Methods("GET", "OPTIONS")
|
||||
api.HandleFunc("/news/sync", s.handleNewsSync).Methods("POST", "OPTIONS")
|
||||
|
||||
// Scanner / Signals
|
||||
api.HandleFunc("/signals", s.handleGetSignals).Methods("GET", "OPTIONS")
|
||||
api.HandleFunc("/signals/scan", s.handleTriggerScan).Methods("POST", "OPTIONS")
|
||||
api.HandleFunc("/prices", s.handleGetPrices).Methods("GET", "OPTIONS")
|
||||
|
||||
// Insider trades (SEC EDGAR)
|
||||
api.HandleFunc("/insider-trades", s.handleGetInsiderTrades).Methods("GET", "OPTIONS")
|
||||
api.HandleFunc("/insider-trades/sync", s.handleSyncInsider).Methods("POST", "OPTIONS")
|
||||
|
||||
// eToro universe
|
||||
api.HandleFunc("/etoro/sync", s.handleSyncEtoro).Methods("POST", "OPTIONS")
|
||||
api.HandleFunc("/etoro/status", s.handleEtoroStatus).Methods("GET", "OPTIONS")
|
||||
|
||||
// Discovery
|
||||
api.HandleFunc("/discover", s.handleGetDiscovery).Methods("GET", "OPTIONS")
|
||||
api.HandleFunc("/discover/run", s.handleRunDiscovery).Methods("POST", "OPTIONS")
|
||||
api.HandleFunc("/discover/status", s.handleDiscoveryStatus).Methods("GET", "OPTIONS")
|
||||
|
||||
s.router.PathPrefix("/").HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
fmt.Fprintf(w, "StockRadar API running")
|
||||
@@ -137,8 +182,19 @@ func (s *Server) handleTestKey(w http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
// Pour l'instant on vérifie juste que la clé existe
|
||||
// On branchera le vrai ping API plus tard
|
||||
if provider == "finnhub" {
|
||||
apiKey, err := s.settings.Get(keyName)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
if err := finnhub.New(apiKey).Ping(); err != nil {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
fmt.Fprintf(w, `{"status":"error","message":%q}`, err.Error())
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
fmt.Fprintf(w, `{"status":"ok","provider":"%s"}`, provider)
|
||||
}
|
||||
|
||||
@@ -0,0 +1,245 @@
|
||||
package yahoo
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"time"
|
||||
)
|
||||
|
||||
// baseURL is Yahoo Finance's v8 chart endpoint (daily OHLCV history
// and latest-price metadata).
const baseURL = "https://query1.finance.yahoo.com/v8/finance/chart"

// summaryURL is Yahoo Finance's v10 quoteSummary endpoint (fundamental
// data such as market cap, float, short ratio).
const summaryURL = "https://query1.finance.yahoo.com/v10/finance/quoteSummary"
|
||||
|
||||
// Client is a minimal Yahoo Finance HTTP client.
type Client struct {
	// http is the underlying HTTP client; New sets a 10s timeout.
	http *http.Client
}
|
||||
|
||||
// Bar is one daily OHLCV candle returned by History.
type Bar struct {
	Date   time.Time // bar date in UTC (converted from Yahoo's unix timestamp)
	Open   float64
	High   float64
	Low    float64
	Close  float64
	Volume int64
}
|
||||
|
||||
// Quote is a lightweight price snapshot built from the chart endpoint's
// metadata by GetQuote.
type Quote struct {
	Symbol    string
	Price     float64 // regular market price
	PrevClose float64 // previous session close
	ChangePct float64 // percent change vs PrevClose (0 when PrevClose is 0)
}
|
||||
|
||||
// chartResponse mirrors the JSON shape of Yahoo's v8 chart endpoint.
// Only the fields this package reads are declared.
type chartResponse struct {
	Chart struct {
		Result []struct {
			Meta struct {
				Symbol             string  `json:"symbol"`
				RegularMarketPrice float64 `json:"regularMarketPrice"`
				PreviousClose      float64 `json:"previousClose"`
			} `json:"meta"`
			// Timestamps are unix seconds, one entry per bar; they index
			// into the parallel Open/High/Low/Close/Volume slices below.
			Timestamps []int64 `json:"timestamp"`
			Indicators struct {
				Quote []struct {
					Open   []float64 `json:"open"`
					High   []float64 `json:"high"`
					Low    []float64 `json:"low"`
					Close  []float64 `json:"close"`
					Volume []int64   `json:"volume"`
				} `json:"quote"`
			} `json:"indicators"`
		} `json:"result"`
		// Error is non-nil when Yahoo reports a request-level failure
		// (e.g. unknown symbol) inside a 200 response.
		Error *struct {
			Code        string `json:"code"`
			Description string `json:"description"`
		} `json:"error"`
	} `json:"chart"`
}
|
||||
|
||||
func New() *Client {
|
||||
return &Client{
|
||||
http: &http.Client{Timeout: 10 * time.Second},
|
||||
}
|
||||
}
|
||||
|
||||
func (c *Client) History(symbol string, days int) ([]Bar, error) {
|
||||
rangeStr := "3mo"
|
||||
if days > 90 {
|
||||
rangeStr = "6mo"
|
||||
}
|
||||
url := fmt.Sprintf("%s/%s?interval=1d&range=%s", baseURL, symbol, rangeStr)
|
||||
|
||||
req, err := http.NewRequest("GET", url, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
req.Header.Set("User-Agent", "Mozilla/5.0")
|
||||
|
||||
resp, err := c.http.Do(req)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != 200 {
|
||||
return nil, fmt.Errorf("yahoo: HTTP %d for %s", resp.StatusCode, symbol)
|
||||
}
|
||||
|
||||
var data chartResponse
|
||||
if err := json.NewDecoder(resp.Body).Decode(&data); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if data.Chart.Error != nil {
|
||||
return nil, fmt.Errorf("yahoo: %s — %s", data.Chart.Error.Code, data.Chart.Error.Description)
|
||||
}
|
||||
if len(data.Chart.Result) == 0 {
|
||||
return nil, fmt.Errorf("yahoo: no data for %s", symbol)
|
||||
}
|
||||
|
||||
result := data.Chart.Result[0]
|
||||
quotes := result.Indicators.Quote
|
||||
if len(quotes) == 0 {
|
||||
return nil, fmt.Errorf("yahoo: empty quotes for %s", symbol)
|
||||
}
|
||||
q := quotes[0]
|
||||
|
||||
bars := make([]Bar, 0, len(result.Timestamps))
|
||||
for i, ts := range result.Timestamps {
|
||||
if i >= len(q.Close) || q.Close[i] == 0 {
|
||||
continue
|
||||
}
|
||||
bar := Bar{
|
||||
Date: time.Unix(ts, 0).UTC(),
|
||||
Close: safeFloat(q.Close, i),
|
||||
Open: safeFloat(q.Open, i),
|
||||
High: safeFloat(q.High, i),
|
||||
Low: safeFloat(q.Low, i),
|
||||
Volume: safeInt(q.Volume, i),
|
||||
}
|
||||
bars = append(bars, bar)
|
||||
}
|
||||
return bars, nil
|
||||
}
|
||||
|
||||
func (c *Client) GetQuote(symbol string) (*Quote, error) {
|
||||
url := fmt.Sprintf("%s/%s?interval=1d&range=5d", baseURL, symbol)
|
||||
|
||||
req, err := http.NewRequest("GET", url, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
req.Header.Set("User-Agent", "Mozilla/5.0")
|
||||
|
||||
resp, err := c.http.Do(req)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
var data chartResponse
|
||||
if err := json.NewDecoder(resp.Body).Decode(&data); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if len(data.Chart.Result) == 0 {
|
||||
return nil, fmt.Errorf("yahoo: no result for %s", symbol)
|
||||
}
|
||||
|
||||
meta := data.Chart.Result[0].Meta
|
||||
changePct := 0.0
|
||||
if meta.PreviousClose > 0 {
|
||||
changePct = (meta.RegularMarketPrice - meta.PreviousClose) / meta.PreviousClose * 100
|
||||
}
|
||||
|
||||
return &Quote{
|
||||
Symbol: meta.Symbol,
|
||||
Price: meta.RegularMarketPrice,
|
||||
PrevClose: meta.PreviousClose,
|
||||
ChangePct: changePct,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// MarketCapInfo holds the key fundamental data for a ticker.
type MarketCapInfo struct {
	MarketCap   int64 // in USD
	FloatShares int64
	ShortRatio  float64
	ForwardPE   float64
}
|
||||
|
||||
// quoteSummaryResponse mirrors the JSON shape of Yahoo's v10
// quoteSummary endpoint for the summaryDetail and defaultKeyStatistics
// modules. Yahoo wraps each numeric value in an object whose "raw"
// field carries the machine-readable number.
type quoteSummaryResponse struct {
	QuoteSummary struct {
		Result []struct {
			SummaryDetail struct {
				MarketCap struct {
					Raw int64 `json:"raw"`
				} `json:"marketCap"`
				ForwardPE struct {
					Raw float64 `json:"raw"`
				} `json:"forwardPE"`
			} `json:"summaryDetail"`
			DefaultKeyStatistics struct {
				FloatShares struct {
					Raw int64 `json:"raw"`
				} `json:"floatShares"`
				ShortRatio struct {
					Raw float64 `json:"raw"`
				} `json:"shortRatio"`
			} `json:"defaultKeyStatistics"`
		} `json:"result"`
	} `json:"quoteSummary"`
}
|
||||
|
||||
// GetMarketCap retourne les données fondamentales d'un ticker.
|
||||
func (c *Client) GetMarketCap(symbol string) (*MarketCapInfo, error) {
|
||||
url := fmt.Sprintf("%s/%s?modules=summaryDetail,defaultKeyStatistics", summaryURL, symbol)
|
||||
|
||||
req, err := http.NewRequest("GET", url, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
req.Header.Set("User-Agent", "Mozilla/5.0")
|
||||
|
||||
resp, err := c.http.Do(req)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != 200 {
|
||||
return nil, fmt.Errorf("yahoo quoteSummary: HTTP %d for %s", resp.StatusCode, symbol)
|
||||
}
|
||||
|
||||
var data quoteSummaryResponse
|
||||
if err := json.NewDecoder(resp.Body).Decode(&data); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
results := data.QuoteSummary.Result
|
||||
if len(results) == 0 {
|
||||
return nil, fmt.Errorf("yahoo: no summary for %s", symbol)
|
||||
}
|
||||
|
||||
r := results[0]
|
||||
return &MarketCapInfo{
|
||||
MarketCap: r.SummaryDetail.MarketCap.Raw,
|
||||
FloatShares: r.DefaultKeyStatistics.FloatShares.Raw,
|
||||
ShortRatio: r.DefaultKeyStatistics.ShortRatio.Raw,
|
||||
ForwardPE: r.SummaryDetail.ForwardPE.Raw,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// safeFloat returns s[i], or 0 when i is past the end of s.
func safeFloat(s []float64, i int) float64 {
	if i >= len(s) {
		return 0
	}
	return s[i]
}
|
||||
|
||||
// safeInt returns s[i], or 0 when i is past the end of s.
func safeInt(s []int64, i int) int64 {
	if i >= len(s) {
		return 0
	}
	return s[i]
}
|
||||
Reference in New Issue
Block a user