module patch.copyurl;

import st;
import x;
import config;
import patches;

import core.stdc.stdio;
import core.stdc.stdlib;
import core.stdc.string;

import std.string : toStringz;

import deimos.X11.X : CurrentTime;

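/*
 * copyurl is intended to be bound to a key via the shortcuts table in
 * config.d. A hypothetical entry, following the upstream st patch's
 * suggested binding (the exact Shortcut layout in this port may differ):
 *
 *     Shortcut(MODKEY, XK_l, &copyurl, Arg.init),
 *
 * Repeated presses walk backwards through the urls currently on screen.
 */
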
static if (isPatchEnabled!"COPYURL_HIGHLIGHT_SELECTED_URLS_PATCH") {
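
/* Force the fg/bg colours of the cells [start, end) on the given screen row;
 * used below to paint the URL highlight and to clear it again. */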
void tsetcolor(int row, int start, int end, uint fg, uint bg) {
    for (int i = start; i < end; ++i) {
        term.line[row][i].fg = fg;
        term.line[row][i].bg = bg;
    }
}
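
/* Return a pointer to the last position in str where any of the len strings
 * in find occurs as a prefix, or null if none of them is present. */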
char* findlastany(char* str, const(char*)* find, size_t len) {
    char* found = null;
    for (found = str + strlen(str) - 1; found >= str; --found) {
        for (size_t i = 0; i < len; i++) {
            if (strncmp(found, find[i], strlen(find[i])) == 0) {
                return found;
            }
        }
    }
    return null;
}

/*
** Select and copy the previous url on screen (do nothing if there's no url).
**
** FIXME: doesn't handle urls that span multiple lines; will need to add
** support for multiline "getsel()" first
*/
extern(C) void copyurl(const(Arg)* arg) {
    /* () and [] can appear in urls, but excluding them here will reduce false
     * positives when figuring out where a given url ends.
     */
    static immutable char[] URLCHARS = "ABCDEFGHIJKLMNOPQRSTUVWXYZ" ~
                                       "abcdefghijklmnopqrstuvwxyz" ~
                                       "0123456789-._~:/?#@!$&'*+,;=%";

    static immutable const(char)*[2] URLSTRINGS = [
        cast(const(char)*)"http://".ptr,
        cast(const(char)*)"https://".ptr
    ];

    /* remove highlighting from previous selection if any */
    if (sel.ob.x >= 0 && sel.oe.x >= 0)
        tsetcolor(sel.nb.y, sel.ob.x, sel.oe.x + 1, config.defaultfg, config.defaultbg);

    int i = 0;
    int row = 0;      /* row of current URL */
    int col = 0;      /* column of current URL start */
    int startrow = 0; /* row of last occurrence */
    int colend = 0;   /* column of last occurrence */
    int passes = 0;   /* how many rows have been scanned */

    char* linestr = cast(char*)calloc(term.col + 1, char.sizeof);
    char* c = null;
    char* match = null;
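
    /* If a url is already selected (from a previous copyurl), resume the
     * search just before it so repeated presses move on to earlier urls;
     * otherwise start from the bottom-right corner of the screen. */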
    row = (sel.ob.x >= 0 && sel.nb.y > 0) ? sel.nb.y : term.bot;
    row = (row < term.top) ? term.top : (row > term.bot) ? term.bot : row;
    startrow = row;

    colend = (sel.ob.x >= 0 && sel.nb.y > 0) ? sel.nb.x : term.col;
    colend = (colend < 0) ? 0 : (colend > term.col) ? term.col : colend;

    /*
    ** Scan from (term.bot, term.col) to (0, 0) and find the
    ** next occurrence of a URL
    */
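    /* term.bot + 2 passes are enough to visit every row once and then revisit
     * the starting row at its full width (the first pass may stop at colend) */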
    while (passes != term.bot + 2) {
        /* Read in each column of every row until
        ** we hit previous occurrence of URL
        */
        for (col = 0, i = 0; col < colend; ++col, ++i) {
            linestr[i] = cast(char)term.line[row][col].u;
        }
        linestr[term.col] = '\0';

        if ((match = findlastany(linestr, URLSTRINGS.ptr, URLSTRINGS.length)) !is null)
            break;

        if (--row < term.top)
            row = term.bot;

        colend = term.col;
        passes++;
    }

    if (match !is null) {
        /* must happen before trim */
        selclear();
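        /* the match points into linestr, so this offset is the column where the url begins */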
        sel.ob.x = cast(int)(strlen(linestr) - strlen(match));

        /* trim the rest of the line from the url match */
        for (c = match; *c != '\0'; ++c) {
            if (strchr(URLCHARS.ptr, *c) is null) {
                *c = '\0';
                break;
            }
        }

        /* highlight selection by inverting terminal colors */
        tsetcolor(row, sel.ob.x, sel.ob.x + cast(int)strlen(match), config.defaultbg, config.defaultfg);

        /* select and copy */
        sel.mode = SelMode.READY;
        sel.type = SelType.REGULAR;
        sel.oe.x = sel.ob.x + cast(int)strlen(match) - 1;
        sel.ob.y = sel.oe.y = row;
        selnormalize();
        tsetdirt(sel.nb.y, sel.ne.y);
        setsel(getsel(), CurrentTime);
        xclipcopy();
    }

    free(linestr);
}

} else {

/* select and copy the previous url on screen (do nothing if there's no url).
 * known bug: doesn't handle urls that span multiple lines (wontfix), depends on multiline "getsel()"
 * known bug: only finds the first url on a line (mightfix)
 */
extern(C) void copyurl(const(Arg)* arg) {
    /* () and [] can appear in urls, but excluding them here will reduce false
     * positives when figuring out where a given url ends.
     */
    static immutable char[] URLCHARS = "ABCDEFGHIJKLMNOPQRSTUVWXYZ" ~
                                       "abcdefghijklmnopqrstuvwxyz" ~
                                       "0123456789-._~:/?#@!$&'*+,;=%";

    int i, row, startrow;
    char* linestr = cast(char*)calloc(term.col + 1, char.sizeof);
    char* c;
    char* match = null;

    row = (sel.ob.x >= 0 && sel.nb.y > 0) ? sel.nb.y - 1 : term.bot;
    row = (row < term.top) ? term.top : (row > term.bot) ? term.bot : row;
    startrow = row;

    /* find the start of the last url before selection */
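    /* the loop wraps from term.top back around to term.bot and gives up once
     * it is back on the row it started from */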
    do {
        for (i = 0; i < term.col; ++i) {
            linestr[i] = cast(char)term.line[row][i].u;
        }
        linestr[term.col] = '\0';
        if ((match = strstr(linestr, "http://")) !is null ||
            (match = strstr(linestr, "https://")) !is null)
            break;
        if (--row < term.top)
            row = term.bot;
    } while (row != startrow);

    if (match !is null) {
        /* must happen before trim */
        selclear();
        sel.ob.x = cast(int)(strlen(linestr) - strlen(match));

        /* trim the rest of the line from the url match */
        for (c = match; *c != '\0'; ++c) {
            if (strchr(URLCHARS.ptr, *c) is null) {
                *c = '\0';
                break;
            }
        }

        /* select and copy */
        sel.mode = SelMode.READY;
        sel.type = SelType.REGULAR;
        sel.oe.x = sel.ob.x + cast(int)strlen(match) - 1;
        sel.ob.y = sel.oe.y = row;
        selnormalize();
        tsetdirt(sel.nb.y, sel.ne.y);
        setsel(getsel(), CurrentTime);
        xclipcopy();
    }

    free(linestr);
}

}