mirror of
https://github.com/kittywitch/nixfiles.git
synced 2026-02-09 04:19:19 -08:00
feat: conky is eating my life
This commit is contained in:
parent
b7c510de28
commit
34bc9ae04e
15 changed files with 1830 additions and 98 deletions
6
flake.lock
generated
6
flake.lock
generated
|
|
@ -1090,11 +1090,11 @@
|
|||
},
|
||||
"nixpkgs_7": {
|
||||
"locked": {
|
||||
"lastModified": 1741851582,
|
||||
"narHash": "sha256-cPfs8qMccim2RBgtKGF+x9IBCduRvd/N5F4nYpU0TVE=",
|
||||
"lastModified": 1742069588,
|
||||
"narHash": "sha256-C7jVfohcGzdZRF6DO+ybyG/sqpo1h6bZi9T56sxLy+k=",
|
||||
"owner": "nixos",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "6607cf789e541e7873d40d3a8f7815ea92204f32",
|
||||
"rev": "c80f6a7e10b39afcc1894e02ef785b1ad0b0d7e5",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
|
|
|
|||
197
home/environments/i3/conky/components.lua
Normal file
197
home/environments/i3/conky/components.lua
Normal file
|
|
@ -0,0 +1,197 @@
|
|||
-- Resolve the directory this script lives in (from the debug source path)
-- so sibling modules can be required regardless of conky's working directory.
local _dir_ = debug.getinfo(1, "S").source:sub(2):match("(.*[/\\])") or "./"
-- Prepend (rather than replace) the module search path: sibling files still
-- win lookups, but system-installed modules stay reachable. The original
-- assignment discarded the existing package.path entirely.
package.path = _dir_ .. "?.lua;" .. package.path

-- Intentionally global so other conky-loaded chunks can reach them --
-- TODO(review): confirm nothing relies on these leaking; otherwise make local.
kat = require("kat")
liluat = require("liluat")
ftcsv = require("ftcsv")
||||
--- Build a reusable template renderer.
-- Compiles the liluat template `t` once, using {% %} as the tag delimiters,
-- and returns a function that renders the compiled template against a table
-- of values.
function tpl(t)
  local compiled = liluat.compile(t, { start_tag = "{%", end_tag = "%}" })
  return function(env)
    return liluat.render(compiled, env)
  end
end
|
||||
|
||||
--- Split `inputstr` on the pattern class `sep` (default: whitespace, "%s")
-- and return an array of the non-empty pieces.
-- NOTE(review): this extends the global `string` table; kept as-is because
-- callers in this file use the `string.split(...)` form.
function string.split(inputstr, sep)
  if sep == nil then
    sep = "%s"
  end
  local pieces = {}
  for piece in inputstr:gmatch("([^" .. sep .. "]+)") do
    pieces[#pieces + 1] = piece
  end
  return pieces
end
|
||||
|
||||
-- Module table returned at the bottom of this file (intentionally global).
components = {}
-- Single font family shared by all text styles below.
local base_font = "Monaspace Krypton"

-- conky format strings used by conky_fmt(); %s slots are filled via
-- string.format at call time.
local formats = {
  h1 = string.format("%s:bold:size=16", base_font),     -- font spec for headings
  font = "${font %s}%s$font",                           -- wrap text in a font block
  item = "${color grey}%s:$color",                      -- grey "label:" prefix
}

-- Triggers listed here get wrapped in their font spec by conky_fmt();
-- the value is only a presence marker, never read.
local font_formats = {
  h1 = "not_nil",
}
|
||||
|
||||
--- Entry point for conky's ${lua fmt <trigger> <words...>} objects.
-- Joins the remaining arguments with spaces and formats them through
-- formats[trigger]; triggers present in font_formats are additionally
-- wrapped in their font spec via formats.font.
function conky_fmt(trigger, ...)
  -- `text` was an accidental global; keep it local to this call
  local text = table.concat({ ... }, " ")
  if font_formats[trigger] ~= nil then
    return conky_parse(string.format(formats.font, formats[trigger], text))
  end
  return conky_parse(string.format(formats[trigger], text))
end
|
||||
|
||||
--- Conky snippet yielding the CPU model name.
-- Runs a grep/cut/sed pipeline over /proc/cpuinfo every kat.exec_interval
-- seconds, stripping leading whitespace and the "N-Core Processor" suffix.
function cpu_model()
  local snippet = [=[${execi %i grep model /proc/cpuinfo | cut -d : -f2 | tail -1 | sed 's/\s//' | sed 's/ [[:digit:]]*-Core Processor$//g'}]=]
  return snippet:format(kat.exec_interval)
end
|
||||
|
||||
--- Conky snippet that runs the bundled cpu_sct.sh (socket/core/thread
-- summary) from kat.root_dir every kat.exec_interval seconds.
function cpu_sct()
  return ("${execi %i %scpu_sct.sh}"):format(kat.exec_interval, kat.root_dir)
end
|
||||
|
||||
|
||||
--- Render the CPU section of the layout for ${lua cpu_section}.
-- The liluat template pulls the model string and socket/thread summary from
-- the shell-snippet helpers above, then the result is handed to conky_parse.
function conky_cpu_section()
  local render = tpl([[
${lua fmt h1 CPU} ${hr}
${color grey}Variety:$color {%= cpu_model() %} {%= cpu_sct() %}
${cpugraph}
${color grey}Frequency:$color ${freq_g} GHz
${color grey}Usage:$color $cpu%
]])
  return conky_parse(render({ cpu_model = cpu_model, cpu_sct = cpu_sct }))
end
|
||||
|
||||
--- Conky snippet querying a single nvidia-smi field.
-- NOTE(review): superseded by gpu_csv_query() below; kept for compatibility.
-- The refresh interval was hard-coded to 15 even though the format string
-- takes an %i slot; use kat.exec_interval for consistency with
-- cpu_model()/cpu_sct().
function gpu_query(query)
  return string.format([[${execi %i nvidia-smi --query-gpu=%s --format=csv,noheader | sed 's/\d*\s\%%//' }]], kat.exec_interval, query)
end
|
||||
|
||||
-- GPU Query
-- Fields requested from nvidia-smi, in display order. The order here also
-- drives the on-screen ordering via query_headers_index below.
local query_headers = {
  "index",
  "name",
  "driver_version",
  "fan.speed",
  "utilization.gpu",
  "utilization.memory",
  "utilization.encoder",
  "utilization.decoder",
  "clocks.current.graphics",
  "clocks.current.sm",
  "clocks.current.memory",
  "clocks.current.video",
  "memory.used",
  "memory.total",
  "temperature.gpu",
}
-- Per-field display templates; fields without an entry use `default`
-- (grey label from gpu_header_aliases + value).
local gpu_display_templates = {
  index = "${lua fmt h1 GPU %s} ${hr}",
  default = "${lua fmt item %s} %s",
}
-- Human-readable labels for the default display template.
local gpu_header_aliases = {
  ["name"] = "Card",
  ["driver_version"] = "Driver Version",
  ["fan.speed"] = "Fan Speed",
  ["utilization.gpu"] = "Core Usage",
  ["utilization.memory"] = "Memory Usage",
  ["utilization.encoder"] = "Encoder Usage",
  ["utilization.decoder"] = "Decoder Usage",
  ["clocks.current.graphics"] = "Core Frequency",
  ["clocks.current.sm"] = "SM Frequency",
  ["clocks.current.memory"] = "Memory Frequency",
  ["clocks.current.video"] = "Video Frequency",
  ["memory.used"] = "Memory Used",
  ["memory.total"] = "Memory Total",
  ["temperature.gpu"] = "Temperature",
};
-- Reverse index: field name -> position in query_headers (display order).
local query_headers_index = {}
for i, header in ipairs(query_headers) do
  query_headers_index[header] = i
end
-- Command generation caching: the nvidia-smi invocation is built once.
local query_header_string = table.concat(query_headers, ",")
local query_command = string.format("nvidia-smi --query-gpu=%s --format=csv,nounits", query_header_string)
-- Parsed header metadata (clean name + unit per raw CSV heading); populated
-- lazily on the first gpu_csv_query() call.
local headers = nil
|
||||
--- Query nvidia-smi once for every field in query_headers, for all GPUs.
-- Returns an array with one entry per GPU; each entry maps the field's
-- position in query_headers (unknown fields go to slot 500) to a
-- pre-formatted conky display line. Header metadata (clean field name and
-- unit, e.g. "[MiB]" -> " MiB") is parsed from the CSV header row once and
-- cached in the file-local `headers` table.
function gpu_csv_query()
  local gpus = {}
  local pipe = io.popen(query_command)
  local query = pipe:read("*all")
  -- the popen handle was previously leaked; close it once fully read
  pipe:close()
  -- normalize ", " separators so ftcsv sees plain commas
  query = query:gsub(",%s", ",")
  local items, raw_headers = ftcsv.parse(query, {
    loadFromString = true,
  })

  if headers == nil then
    headers = {}
    -- units for fields nvidia-smi reports without a [unit] suffix;
    -- unitless fields (index, name, driver_version) simply stay nil
    local keys_to_units = {
      ["temperature.gpu"] = "°C",
    }
    -- cosmetic remapping of nvidia-smi's unit spellings
    local unit_remap = {
      MiB = " MiB",
    }
    for _, heading in ipairs(raw_headers) do
      local heading_split = string.split(heading)
      local key = heading_split[1]
      local query_unit
      if #heading_split == 1 then
        -- heading has no "[unit]" part: look the unit up by field name
        query_unit = keys_to_units[key]
      else
        -- strip the surrounding brackets from e.g. "[MiB]"
        query_unit = string.sub(heading_split[2], 2, -2)
        query_unit = unit_remap[query_unit] or query_unit
      end
      headers[heading] = {
        clean = key,
        unit = query_unit,
      }
    end
  end

  for i, gpu in pairs(items) do
    -- was an accidental global
    local current_gpu = {}
    for header, data in pairs(gpu) do
      local cur_header = headers[header]
      -- value plus unit suffix ("" when the field is unitless)
      local data_sf = string.format("%s%s", data, cur_header.unit or "")
      -- unknown fields sort to the end of the section
      local display_idx = query_headers_index[cur_header.clean] or 500
      local display
      if gpu_display_templates[cur_header.clean] ~= nil then
        display = string.format(gpu_display_templates[cur_header.clean], data_sf)
      else
        display = string.format(gpu_display_templates.default, gpu_header_aliases[cur_header.clean], data_sf)
      end
      current_gpu[display_idx] = display
    end
    gpus[i] = current_gpu
  end

  return gpus
end
|
||||
|
||||
-- GPU Display
--- Render one conky section per detected GPU for ${lua gpu_section}.
-- Concatenates each GPU's pre-formatted lines (in display order) and hands
-- the whole block to conky_parse.
function conky_gpu_section()
  -- was an accidental global
  local gpus = gpu_csv_query()
  -- collect lines in a buffer instead of O(n^2) string concatenation
  local lines = {}
  for _, gpu in pairs(gpus) do
    for i = 1, #gpu do
      lines[#lines + 1] = gpu[i]
    end
  end
  if #lines == 0 then
    return conky_parse("")
  end
  -- every line carries a trailing newline, matching the old behavior
  return conky_parse(table.concat(lines, "\n") .. "\n")
end
|
||||
|
||||
-- NOTE(review): `components` is never populated above -- the conky_* entry
-- points are reached as globals -- so this returns an empty table. Confirm
-- whether callers expect anything in it.
return components
|
||||
75
home/environments/i3/conky/conky.conf
Normal file
75
home/environments/i3/conky/conky.conf
Normal file
|
|
@ -0,0 +1,75 @@
|
|||
|
||||
-- conky.conf is evaluated as Lua by conky: conky.config holds window and
-- sampling settings, conky.text the rendered layout.

-- Resolve this file's directory so sibling Lua files can be found.
local _dir_ = debug.getinfo(1, "S").source:sub(2):match("(.*[/\\])") or "./"
-- NOTE(review): this *replaces* the module search path instead of appending,
-- so only sibling modules are requirable afterwards -- confirm intentional.
package.path = _dir_ .. "?.lua"

-- NOTE(review): `kat` is not referenced below; presumably required for its
-- side effects or shared state with components.lua -- verify.
kat = require("kat")

conky.config = {
  alignment = 'top_left',
  background = false,
  border_width = 1,
  cpu_avg_samples = 2,
  default_color = 'white',
  default_outline_color = 'white',
  default_shade_color = 'white',
  default_graph_height = 60,
  double_buffer = true,
  draw_borders = false,
  draw_graph_borders = true,
  draw_outline = false,
  draw_shades = false,
  extra_newline = false,
  font = 'Monaspace Krypton:size=12',
  gap_x = 60,
  gap_y = 60,
  minimum_height = 5,
  minimum_width = 5,
  net_avg_samples = 2,
  no_buffers = true,
  -- output targets: X11 only
  out_to_console = false,
  out_to_ncurses = false,
  out_to_stderr = false,
  out_to_wayland = false,
  out_to_x = true,
  own_window = true,
  own_window_class = 'Conky',
  own_window_type = 'override',
  -- own_window_transparent = true,
  -- semi-transparent black backdrop (argb alpha 153 of 255)
  own_window_colour = "000000",
  own_window_argb_visual = true,
  own_window_argb_value = 153,
  own_window_hints = 'undecorated,sticky,below,skip_taskbar,skip_pager',
  show_graph_range = false,
  show_graph_scale = false,
  stippled_borders = 0,
  update_interval = 1.0,
  uppercase = false,
  use_spacer = 'none',
  use_xft = true,
  -- components.lua defines the conky_fmt/conky_cpu_section/conky_gpu_section
  -- functions used via ${lua ...} in conky.text below
  lua_load = _dir_ .. "components.lua",
}


conky.text = [[
$sysname $nodename $kernel $machine
$hr
${color grey}Uptime:$color $uptime
${color grey}Frequency:$color $freq_g GHz
${color grey}RAM Usage:$color $mem/$memmax - $memperc% ${membar 4}
${color grey}Swap Usage:$color $swap/$swapmax - $swapperc% ${swapbar 4}
${color grey}CPU Usage:$color $cpu% ${cpubar 4}
${color grey}Processes:$color $processes ${color grey}Running:$color $running_processes
$hr
${color grey}File systems:
/ $color${fs_used /}/${fs_size /} ${fs_bar 6 /}
${color grey}Networking:
Up:$color ${upspeed} ${color grey} - Down:$color ${downspeed}
$hr
${color grey}Name PID CPU% MEM%
${color lightgrey} ${top name 1} ${top pid 1} ${top cpu 1} ${top mem 1}
${color lightgrey} ${top name 2} ${top pid 2} ${top cpu 2} ${top mem 2}
${color lightgrey} ${top name 3} ${top pid 3} ${top cpu 3} ${top mem 3}
${color lightgrey} ${top name 4} ${top pid 4} ${top cpu 4} ${top mem 4}
${lua cpu_section}
${lua gpu_section}
]]
|
||||
29
home/environments/i3/conky/conky.nix
Normal file
29
home/environments/i3/conky/conky.nix
Normal file
|
|
@ -0,0 +1,29 @@
|
|||
{ config, pkgs, ... }: {
  # jq is required by cpu_sct.sh; conky is the monitor itself.
  home.packages = with pkgs; [
    jq
    conky
  ];

  # Symlink the whole conky directory out of the store so edits to the
  # Lua/shell files take effect without a rebuild.
  xdg.configFile.conky = {
    recursive = true;
    source = config.lib.file.mkOutOfStoreSymlink ./.;
  };

  systemd.user.services.conky = {
    Unit = {
      Description = "Conky - Lightweight system monitor";
      After = [ "graphical-session.target" ];
      # Restart the unit whenever the config file's store path changes.
      X-Restart-Triggers = [
        ./conky.conf
      ];
    };

    Service = {
      Restart = "always";
      RestartSec = "3";
      # NOTE(review): toString of a one-element list -- works, but a plain
      # string would be clearer. conky is started with no arguments, so it
      # presumably picks up the xdg config linked above -- verify.
      ExecStart = toString ([ "${pkgs.conky}/bin/conky"]);
    };

    Install.WantedBy = [ "graphical-session.target" ];
  };
}
|
||||
12
home/environments/i3/conky/cpu_sct.sh
Executable file
12
home/environments/i3/conky/cpu_sct.sh
Executable file
|
|
@ -0,0 +1,12 @@
|
|||
#!/usr/bin/env bash
# Summarize CPU topology (via lscpu + jq) as a short string for conky:
#   single socket:   "<cores>c<threads>t"              e.g. "8c16t"
#   multiple sockets: "<sockets>s<cores>c<threads>t"   (per-socket counts)
set -euo pipefail

CPU_JSON=$(lscpu --json | jq '.lscpu | reduce .[] as $i ({}; .[$i.field] = $i.data)')
CPU_SOCKETS=$(echo "$CPU_JSON" | jq '."Socket(s):"' -r)
CPU_CORES_PER_SOCKET=$(echo "$CPU_JSON" | jq '."Core(s) per socket:"' -r)
CPU_THREADS_PER_CORE=$(echo "$CPU_JSON" | jq '."Thread(s) per core:"' -r)
CPU_TOTAL_THREADS_PER_SOCKET=$((CPU_CORES_PER_SOCKET * CPU_THREADS_PER_CORE))

if [ "$CPU_SOCKETS" -eq 1 ]; then
  echo "${CPU_CORES_PER_SOCKET}c${CPU_TOTAL_THREADS_PER_SOCKET}t"
else
  # bug fix: cores were suffixed "t" (thread marker) instead of "c"
  echo "${CPU_SOCKETS}s${CPU_CORES_PER_SOCKET}c${CPU_TOTAL_THREADS_PER_SOCKET}t"
fi
|
||||
819
home/environments/i3/conky/ftcsv.lua
Normal file
819
home/environments/i3/conky/ftcsv.lua
Normal file
|
|
@ -0,0 +1,819 @@
|
|||
local ftcsv = {
|
||||
_VERSION = 'ftcsv 1.4.0',
|
||||
_DESCRIPTION = 'CSV library for Lua',
|
||||
_URL = 'https://github.com/FourierTransformer/ftcsv',
|
||||
_LICENSE = [[
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2016-2023 Shakil Thakur
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
]]
|
||||
}
|
||||
|
||||
-- perf: localize hot string functions (locals resolve to VM registers,
-- globals are table lookups)
local sbyte = string.byte
local ssub = string.sub

-- luajit/lua compatability layer
local luaCompatibility = {}
if type(jit) == 'table' or _ENV then
    -- luajit and lua 5.2+: load() compiles strings directly
    luaCompatibility.load = _G.load
else
    -- lua 5.1: loadstring() is the string-compiling entry point
    luaCompatibility.load = loadstring
end
|
||||
|
||||
-- luajit specific speedups
-- luajit performs faster with iterating over string.byte,
-- whereas vanilla lua performs faster with string.find
if type(jit) == 'table' then
    luaCompatibility.LuaJIT = true
    -- finds the end of an escape sequence
    -- byte-by-byte scan; returns the index just before the closing quote
    -- plus whether a "" escape was seen, or nothing if no close was found
    function luaCompatibility.findClosingQuote(i, inputLength, inputString, quote, doubleQuoteEscape)
        local currentChar, nextChar = sbyte(inputString, i), nil
        while i <= inputLength do
            nextChar = sbyte(inputString, i+1)

            -- this one deals with " double quotes that are escaped "" within single quotes "
            -- these should be turned into a single quote at the end of the field
            if currentChar == quote and nextChar == quote then
                doubleQuoteEscape = true
                i = i + 2
                currentChar = sbyte(inputString, i)

            -- identifies the escape toggle
            elseif currentChar == quote and nextChar ~= quote then
                return i-1, doubleQuoteEscape
            else
                i = i + 1
                currentChar = nextChar
            end
        end
    end

else
    luaCompatibility.LuaJIT = false

    -- vanilla lua closing quote finder
    -- uses find('"+') runs; an odd-length run means the last quote closes
    -- the field, an even-length run is all escapes -- recurse past it
    function luaCompatibility.findClosingQuote(i, inputLength, inputString, quote, doubleQuoteEscape)
        local j, difference
        i, j = inputString:find('"+', i)
        if j == nil then
            return nil
        end
        difference = j - i
        if difference >= 1 then doubleQuoteEscape = true end
        if difference % 2 == 1 then
            return luaCompatibility.findClosingQuote(j+1, inputLength, inputString, quote, doubleQuoteEscape)
        end
        return j-1, doubleQuoteEscape
    end
end
|
||||
|
||||
|
||||
-- determine the real headers as opposed to the header mapping
-- Returns the de-duplicated headers, filtered to fieldsToKeep when given,
-- preserving first-seen order.
local function determineRealHeaders(headerField, fieldsToKeep)
    local realHeaders = {}
    local headerSet = {}
    for i = 1, #headerField do
        if not headerSet[headerField[i]] then
            if fieldsToKeep ~= nil and fieldsToKeep[headerField[i]] then
                table.insert(realHeaders, headerField[i])
                headerSet[headerField[i]] = true
            elseif fieldsToKeep == nil then
                table.insert(realHeaders, headerField[i])
                headerSet[headerField[i]] = true
            end
        end
    end
    return realHeaders
end
|
||||
|
||||
|
||||
-- Count the unique header names that will actually be kept (all of them
-- when fieldsToKeep is nil).
local function determineTotalColumnCount(headerField, fieldsToKeep)
    local totalColumnCount = 0
    local headerFieldSet = {}
    for _, header in pairs(headerField) do
        -- count unique columns and
        -- also figure out if it's a field to keep
        if not headerFieldSet[header] and
            (fieldsToKeep == nil or fieldsToKeep[header]) then
            headerFieldSet[header] = true
            totalColumnCount = totalColumnCount + 1
        end
    end
    return totalColumnCount
end
|
||||
|
||||
-- Build a __newindex metamethod that pre-populates each new row table with
-- the final headers as keys (a parsing speedup). Compiled from generated
-- source via luaCompatibility.load.
local function generateHeadersMetamethod(finalHeaders)
    -- if a header field tries to escape, we will simply return nil
    -- the parser will still parse, but wont get the performance benefit of
    -- having headers predefined
    for _, headers in ipairs(finalHeaders) do
        if headers:find("]") then
            return nil
        end
    end
    local rawSetup = "local t, k, _ = ... \
rawset(t, k, {[ [[%s]] ]=true})"
    rawSetup = rawSetup:format(table.concat(finalHeaders, "]] ]=true, [ [["))
    return luaCompatibility.load(rawSetup)
end
|
||||
|
||||
-- main function used to parse
-- Scans inputString starting at index i, splitting on options.delimiter and
-- newlines, honoring quoted fields (unless options.ignoreQuotes). Returns
-- (rows, nextIndex[, totalColumnCount]); in buffered mode it may return
-- early with the line-start offset so the caller can refill the buffer.
-- Order-sensitive state machine -- left byte-identical, comments only.
local function parseString(inputString, i, options)

    -- keep track of my chars!
    local inputLength = options.inputLength or #inputString
    local currentChar, nextChar = sbyte(inputString, i), nil
    local skipChar = 0
    local field
    local fieldStart = i
    local fieldNum = 1
    local lineNum = 1
    local lineStart = i
    local doubleQuoteEscape, emptyIdentified = false, false

    local skipIndex
    local charPatternToSkip = "[" .. options.delimiter .. "\r\n]"

    --bytes
    local CR = sbyte("\r")
    local LF = sbyte("\n")
    local quote = sbyte('"')
    local delimiterByte = sbyte(options.delimiter)

    -- explode most used options
    local headersMetamethod = options.headersMetamethod
    local fieldsToKeep = options.fieldsToKeep
    local ignoreQuotes = options.ignoreQuotes
    local headerField = options.headerField
    local endOfFile = options.endOfFile
    local buffered = options.buffered

    local outResults = {}

    -- in the first run, the headers haven't been set yet.
    if headerField == nil then
        headerField = {}
        -- setup a metatable to simply return the key that's passed in
        local headerMeta = {__index = function(_, key) return key end}
        setmetatable(headerField, headerMeta)
    end

    if headersMetamethod then
        setmetatable(outResults, {__newindex = headersMetamethod})
    end
    outResults[1] = {}

    -- totalColumnCount based on unique headers and fieldsToKeep
    local totalColumnCount = options.totalColumnCount or determineTotalColumnCount(headerField, fieldsToKeep)

    -- closure over the scan state: slices the finished field out of the
    -- input, un-escapes "" pairs, and stores it under the header's key
    local function assignValueToField()
        if fieldsToKeep == nil or fieldsToKeep[headerField[fieldNum]] then

            -- create new field
            if ignoreQuotes == false and sbyte(inputString, i-1) == quote then
                field = ssub(inputString, fieldStart, i-2)
            else
                field = ssub(inputString, fieldStart, i-1)
            end
            if doubleQuoteEscape then
                field = field:gsub('""', '"')
            end

            -- reset flags
            doubleQuoteEscape = false
            emptyIdentified = false

            -- assign field in output
            if headerField[fieldNum] ~= nil then
                outResults[lineNum][headerField[fieldNum]] = field
            else
                error('ftcsv: too many columns in row ' .. options.rowOffset + lineNum)
            end
        end
    end

    while i <= inputLength do
        -- go by two chars at a time,
        -- currentChar is set at the bottom.
        nextChar = sbyte(inputString, i+1)

        -- empty string
        if ignoreQuotes == false and currentChar == quote and nextChar == quote then
            skipChar = 1
            fieldStart = i + 2
            emptyIdentified = true

        -- escape toggle.
        -- This can only happen if fields have quotes around them
        -- so the current "start" has to be where a quote character is.
        elseif ignoreQuotes == false and currentChar == quote and nextChar ~= quote and fieldStart == i then
            fieldStart = i + 1
            -- if an empty field was identified before assignment, it means
            -- that this is a quoted field that starts with escaped quotes
            -- ex: """a"""
            if emptyIdentified then
                fieldStart = fieldStart - 2
                emptyIdentified = false
            end
            skipChar = 1
            i, doubleQuoteEscape = luaCompatibility.findClosingQuote(i+1, inputLength, inputString, quote, doubleQuoteEscape)

        -- create some fields
        elseif currentChar == delimiterByte then
            assignValueToField()

            -- increaseFieldIndices
            fieldNum = fieldNum + 1
            fieldStart = i + 1

        -- newline
        elseif (currentChar == LF or currentChar == CR) then
            assignValueToField()

            -- handle CRLF
            if (currentChar == CR and nextChar == LF) then
                skipChar = 1
                fieldStart = fieldStart + 1
            end

            -- incrememnt for new line
            if fieldNum < totalColumnCount then
                -- sometimes in buffered mode, the buffer starts with a newline
                -- this skips the newline and lets the parsing continue.
                if buffered and lineNum == 1 and fieldNum == 1 and field == "" then
                    fieldStart = i + 1 + skipChar
                    lineStart = fieldStart
                else
                    error('ftcsv: too few columns in row ' .. options.rowOffset + lineNum)
                end
            else
                lineNum = lineNum + 1
                outResults[lineNum] = {}
                fieldNum = 1
                fieldStart = i + 1 + skipChar
                lineStart = fieldStart
            end

        elseif luaCompatibility.LuaJIT == false then
            -- vanilla lua: jump straight to the next interesting character
            skipIndex = inputString:find(charPatternToSkip, i)
            if skipIndex then
                skipChar = skipIndex - i - 1
            end

        end

        -- in buffered mode and it can't find the closing quote
        -- it usually means in the middle of a buffer and need to backtrack
        if i == nil then
            if buffered then
                outResults[lineNum] = nil
                return outResults, lineStart
            else
                error("ftcsv: can't find closing quote in row " .. options.rowOffset + lineNum ..
                    ". Try running with the option ignoreQuotes=true if the source incorrectly uses quotes.")
            end
        end

        -- Increment Counter
        i = i + 1 + skipChar
        if (skipChar > 0) then
            currentChar = sbyte(inputString, i)
        else
            currentChar = nextChar
        end
        skipChar = 0
    end

    if buffered and not endOfFile then
        outResults[lineNum] = nil
        return outResults, lineStart
    end

    -- create last new field
    assignValueToField()

    -- remove last field if empty
    if fieldNum < totalColumnCount then

        -- indicates last field was really just a CRLF,
        -- so, it can be removed
        if fieldNum == 1 and field == "" then
            outResults[lineNum] = nil
        else
            error('ftcsv: too few columns in row ' .. options.rowOffset + lineNum)
        end
    end

    return outResults, i, totalColumnCount
end
|
||||
|
||||
-- Post-process the parsed header row according to options: numeric headers
-- for headerless files, empty-header validation, `rename`, and `headerFunc`.
-- Mutates and returns headerField.
local function handleHeaders(headerField, options)
    -- for files where there aren't headers!
    if options.headers == false then
        for j = 1, #headerField do
            headerField[j] = j
        end
    else
        -- make sure a header isn't empty if there are headers
        for _, headerName in ipairs(headerField) do
            if #headerName == 0 then
                error('ftcsv: Cannot parse a file which contains empty headers')
            end
        end
    end

    -- rename fields as needed!
    if options.rename then
        -- basic rename (["a" = "apple"])
        for j = 1, #headerField do
            if options.rename[headerField[j]] then
                headerField[j] = options.rename[headerField[j]]
            end
        end
        -- files without headers, but with a options.rename need to be handled too!
        if #options.rename > 0 then
            for j = 1, #options.rename do
                headerField[j] = options.rename[j]
            end
        end
    end

    -- apply some sweet header manipulation
    if options.headerFunc then
        for j = 1, #headerField do
            headerField[j] = options.headerFunc(headerField[j])
        end
    end

    return headerField
end
|
||||
|
||||
-- load an entire file into memory
-- Reads `amount` from textFile; the handle is only closed for full reads
-- ("*all") -- buffered callers keep it open for subsequent reads.
local function loadFile(textFile, amount)
    local file = io.open(textFile, "r")
    if not file then error("ftcsv: File not found at " .. textFile) end
    local lines = file:read(amount)
    if amount == "*all" then
        file:close()
    end
    return lines, file
end
|
||||
|
||||
-- Resolve inputFile into a string (directly, or read from disk per
-- options.loadFromString); errors on empty input.
local function initializeInputFromStringOrFile(inputFile, options, amount)
    -- handle input via string or file!
    local inputString, file
    if options.loadFromString then
        inputString = inputFile
    else
        inputString, file = loadFile(inputFile, amount)
    end

    -- if they sent in an empty file...
    if inputString == "" then
        error('ftcsv: Cannot parse an empty file')
    end
    return inputString, file
end
|
||||
|
||||
-- Normalize the (delimiter, options) argument pair: supports the legacy
-- string delimiter, an options table carrying its own delimiter, or nothing.
local function determineArgumentOrder(delimiter, options)
    -- backwards compatibile layer
    if type(delimiter) == "string" then
        return delimiter, options

    -- the new format for parseLine
    elseif type(delimiter) == "table" then
        local realDelimiter = delimiter.delimiter or ","
        return realDelimiter, delimiter

    -- if nothing is specified, assume "," delimited and call it a day!
    else
        return ",", nil
    end
end
|
||||
|
||||
-- Validate user options and fill defaults. Returns (options, fieldsToKeep)
-- where fieldsToKeep is a set keyed by field name (or nil for "keep all").
-- bufferSize is only legal from parseLine (fromParseLine == true).
local function parseOptions(delimiter, options, fromParseLine)
    -- delimiter MUST be one character
    assert(#delimiter == 1 and type(delimiter) == "string", "the delimiter must be of string type and exactly one character")

    local fieldsToKeep = nil

    if options then

        if options.headers ~= nil then
            assert(type(options.headers) == "boolean", "ftcsv only takes the boolean 'true' or 'false' for the optional parameter 'headers' (default 'true'). You passed in '" .. tostring(options.headers) .. "' of type '" .. type(options.headers) .. "'.")
        end

        if options.rename ~= nil then
            assert(type(options.rename) == "table", "ftcsv only takes in a key-value table for the optional parameter 'rename'. You passed in '" .. tostring(options.rename) .. "' of type '" .. type(options.rename) .. "'.")
        end

        if options.fieldsToKeep ~= nil then
            assert(type(options.fieldsToKeep) == "table", "ftcsv only takes in a list (as a table) for the optional parameter 'fieldsToKeep'. You passed in '" .. tostring(options.fieldsToKeep) .. "' of type '" .. type(options.fieldsToKeep) .. "'.")
            local ofieldsToKeep = options.fieldsToKeep
            if ofieldsToKeep ~= nil then
                -- convert the list into a set for O(1) membership checks
                fieldsToKeep = {}
                for j = 1, #ofieldsToKeep do
                    fieldsToKeep[ofieldsToKeep[j]] = true
                end
            end
            if options.headers == false and options.rename == nil then
                error("ftcsv: fieldsToKeep only works with header-less files when using the 'rename' functionality")
            end
        end

        if options.loadFromString ~= nil then
            assert(type(options.loadFromString) == "boolean", "ftcsv only takes a boolean value for optional parameter 'loadFromString'. You passed in '" .. tostring(options.loadFromString) .. "' of type '" .. type(options.loadFromString) .. "'.")
        end

        if options.headerFunc ~= nil then
            assert(type(options.headerFunc) == "function", "ftcsv only takes a function value for optional parameter 'headerFunc'. You passed in '" .. tostring(options.headerFunc) .. "' of type '" .. type(options.headerFunc) .. "'.")
        end

        if options.ignoreQuotes == nil then
            options.ignoreQuotes = false
        else
            assert(type(options.ignoreQuotes) == "boolean", "ftcsv only takes a boolean value for optional parameter 'ignoreQuotes'. You passed in '" .. tostring(options.ignoreQuotes) .. "' of type '" .. type(options.ignoreQuotes) .. "'.")
        end

        if fromParseLine == true then
            if options.bufferSize == nil then
                options.bufferSize = 2^16
            else
                assert(type(options.bufferSize) == "number", "ftcsv only takes a number value for optional parameter 'bufferSize'. You passed in '" .. tostring(options.bufferSize) .. "' of type '" .. type(options.bufferSize) .. "'.")
            end

        else
            if options.bufferSize ~= nil then
                error("ftcsv: bufferSize can only be specified using 'parseLine'. When using 'parse', the entire file is read into memory")
            end
        end

    else
        -- no options given: use the documented defaults
        options = {
            ["headers"] = true,
            ["loadFromString"] = false,
            ["ignoreQuotes"] = false,
            ["bufferSize"] = 2^16
        }
    end

    return options, fieldsToKeep

end
|
||||
|
||||
-- Find the byte index of the newline ending the header row, skipping
-- newlines that fall inside quoted fields. Errors when a partial buffer
-- doesn't contain a full header row.
local function findEndOfHeaders(str, entireFile)
    local i = 1
    local quote = sbyte('"')
    local newlines = {
        [sbyte("\n")] = true,
        [sbyte("\r")] = true
    }
    local quoted = false
    local char = sbyte(str, i)
    repeat
        -- this should still work for escaped quotes
        -- ex: " a "" b \r\n " -- there is always a pair around the newline
        if char == quote then
            quoted = not quoted
        end
        i = i + 1
        char = sbyte(str, i)
    until (newlines[char] and not quoted) or char == nil

    if not entireFile and char == nil then
        error("ftcsv: bufferSize needs to be larger to parse this file")
    end

    -- swallow the LF of a CRLF pair
    local nextChar = sbyte(str, i+1)
    if nextChar == sbyte("\n") and char == sbyte("\r") then
        i = i + 1
    end
    return i
end
|
||||
|
||||
-- Starting index for parsing: 4 when the input begins with a UTF-8 BOM,
-- otherwise 1.
local function determineBOMOffset(inputString)
    -- BOM files start with bytes 239, 187, 191
    if sbyte(inputString, 1) == 239
        and sbyte(inputString, 2) == 187
        and sbyte(inputString, 3) == 191 then
        return 4
    else
        return 1
    end
end
|
||||
|
||||
-- Parse just the header row and build the argument table for the main
-- parseString pass. Returns (endOfHeaders index, parserArgs, finalHeaders).
local function parseHeadersAndSetupArgs(inputString, delimiter, options, fieldsToKeep, entireFile)
    local startLine = determineBOMOffset(inputString)

    local endOfHeaderRow = findEndOfHeaders(inputString, entireFile)

    -- first pass only covers the header row (inputLength bound)
    local parserArgs = {
        delimiter = delimiter,
        headerField = nil,
        fieldsToKeep = nil,
        inputLength = endOfHeaderRow,
        buffered = false,
        ignoreQuotes = options.ignoreQuotes,
        rowOffset = 0
    }

    local rawHeaders, endOfHeaders = parseString(inputString, startLine, parserArgs)

    -- manipulate the headers as per the options
    local modifiedHeaders = handleHeaders(rawHeaders[1], options)
    parserArgs.headerField = modifiedHeaders
    parserArgs.fieldsToKeep = fieldsToKeep
    parserArgs.inputLength = nil

    -- headerless files: re-read the first row as data
    if options.headers == false then endOfHeaders = startLine end

    local finalHeaders = determineRealHeaders(modifiedHeaders, fieldsToKeep)
    if options.headers ~= false then
        local headersMetamethod = generateHeadersMetamethod(finalHeaders)
        parserArgs.headersMetamethod = headersMetamethod
    end

    return endOfHeaders, parserArgs, finalHeaders
end
|
||||
|
||||
-- runs the show!
|
||||
-- runs the show!
-- Parses an entire CSV (file path, or a string with loadFromString)
-- in one pass. Returns the parsed rows and the list of header names.
function ftcsv.parse(inputFile, delimiter, options)
    local delim, opts = determineArgumentOrder(delimiter, options)
    local parsedOptions, fieldsToKeep = parseOptions(delim, opts, false)

    local inputString = initializeInputFromStringOrFile(inputFile, parsedOptions, "*all")

    local endOfHeaders, parserArgs, finalHeaders =
        parseHeadersAndSetupArgs(inputString, delim, parsedOptions, fieldsToKeep, true)

    local output = parseString(inputString, endOfHeaders, parserArgs)

    return output, finalHeaders
end
|
||||
|
||||
-- Returns the size of an open file in bytes while preserving the
-- handle's current seek position.
local function getFileSize (file)
    local position = file:seek()
    local size = file:seek("end")
    file:seek("set", position)
    return size
end
|
||||
|
||||
-- True when the file handle's current position is at or past fileSize.
local function determineAtEndOfFile(file, fileSize)
    return file:seek() >= fileSize
end
|
||||
|
||||
-- Opens the input for buffered (parseLine) reading; returns the first
-- buffer and the open file handle (see the caller in ftcsv.parseLine).
-- loadFromString is rejected: buffered parsing requires a seekable file.
local function initializeInputFile(inputString, options)
    if options.loadFromString == true then
        error("ftcsv: parseLine currently doesn't support loading from string")
    end
    return initializeInputFromStringOrFile(inputString, options, options.bufferSize)
end
|
||||
|
||||
-- Buffered CSV parsing: returns an iterator yielding (rowNumber, row)
-- one data row per call, reading the file in chunks of
-- options.bufferSize bytes instead of loading it all at once.
-- The iterator closes the file when the input is exhausted, and raises
-- if a single row is larger than the configured buffer size.
function ftcsv.parseLine(inputFile, delimiter, userOptions)
    local delimiter, userOptions = determineArgumentOrder(delimiter, userOptions)
    local options, fieldsToKeep = parseOptions(delimiter, userOptions, true)
    local inputString, file = initializeInputFile(inputFile, options)

    -- needed to decide whether a buffer ends exactly at end-of-file
    local fileSize, atEndOfFile = 0, false
    fileSize = getFileSize(file)
    atEndOfFile = determineAtEndOfFile(file, fileSize)

    local endOfHeaders, parserArgs, _ = parseHeadersAndSetupArgs(inputString, delimiter, options, fieldsToKeep, atEndOfFile)
    parserArgs.buffered = true
    parserArgs.endOfFile = atEndOfFile

    -- parse whatever complete rows the first buffer contains
    local parsedBuffer, endOfParsedInput, totalColumnCount = parseString(inputString, endOfHeaders, parserArgs)
    parserArgs.totalColumnCount = totalColumnCount

    -- keep only the unparsed remainder of the buffer
    inputString = ssub(inputString, endOfParsedInput)
    local bufferIndex, returnedRowsCount = 0, 0
    local currentRow, buffer

    return function()
        -- check parsed buffer for value
        bufferIndex = bufferIndex + 1
        currentRow = parsedBuffer[bufferIndex]
        if currentRow then
            returnedRowsCount = returnedRowsCount + 1
            return returnedRowsCount, currentRow
        end

        -- read more of the input
        buffer = file:read(options.bufferSize)
        if not buffer then
            file:close()
            return nil
        else
            parserArgs.endOfFile = determineAtEndOfFile(file, fileSize)
        end

        -- appends the new input to what was left over
        inputString = inputString .. buffer

        -- re-analyze and load buffer
        parserArgs.rowOffset = returnedRowsCount
        parsedBuffer, endOfParsedInput = parseString(inputString, 1, parserArgs)
        bufferIndex = 1

        -- cut the input string down
        inputString = ssub(inputString, endOfParsedInput)

        -- a refill that yields no complete row means one row spans more
        -- than a whole buffer
        if #parsedBuffer == 0 then
            error("ftcsv: bufferSize needs to be larger to parse this file")
        end

        returnedRowsCount = returnedRowsCount + 1
        return returnedRowsCount, parsedBuffer[bufferIndex]
    end
end
|
||||
|
||||
|
||||
|
||||
-- The ENCODER code is below here
|
||||
-- This could be broken out, but is kept here for portability
|
||||
|
||||
|
||||
-- Escapes a field for quoted CSV output by doubling any embedded
-- double quotes (the caller adds the surrounding quotes).
local function delimitField(field)
    local text = tostring(field)
    if text:find('"', 1, true) then
        return text:gsub('"', '""')
    end
    return text
end
|
||||
|
||||
-- Builds a field-escaping closure for "only required quotes" mode:
-- fields containing '"' are quote-doubled and wrapped in quotes,
-- fields containing a newline or the delimiter are wrapped in quotes,
-- everything else passes through untouched.
local function generateDelimitAndQuoteField(delimiter)
    return function(field)
        local text = tostring(field)
        if text:find('"') then
            return '"' .. text:gsub('"', '""') .. '"'
        end
        if text:find('[\n' .. delimiter .. ']') then
            return '"' .. text .. '"'
        end
        return text
    end
end
|
||||
|
||||
-- Escapes '"' in header names so they can be embedded inside the
-- double-quoted Lua source emitted by csvLineGenerator.
local function escapeHeadersForLuaGenerator(headers)
    local escaped = {}
    for index = 1, #headers do
        local header = headers[index]
        if header:find('"') then
            escaped[index] = header:gsub('"', '\\"')
        else
            escaped[index] = header
        end
    end
    return escaped
end
|
||||
|
||||
-- a function that compiles some lua code to quickly print out the csv
|
||||
-- a function that compiles some lua code to quickly print out the csv
-- Returns (iteratorFunction, argumentTable, 0) for use in a generic
-- for-loop: each call formats one row of inputTable as a CRLF-terminated
-- CSV line. The per-row formatting code is generated as a string and
-- compiled once, with the column order baked in from `headers`.
local function csvLineGenerator(inputTable, delimiter, headers, options)
    local escapedHeaders = escapeHeadersForLuaGenerator(headers)

    -- default template: every field is wrapped in double quotes and
    -- escaped through args.delimitField
    local outputFunc = [[
        local args, i = ...
        i = i + 1;
        if i > ]] .. #inputTable .. [[ then return nil end;
        return i, '"' .. args.delimitField(args.t[i]["]] ..
        table.concat(escapedHeaders, [["]) .. '"]] ..
        delimiter .. [["' .. args.delimitField(args.t[i]["]]) ..
        [["]) .. '"\r\n']]

    -- alternative template: fields are only quoted when necessary
    -- (quoting is then handled inside args.delimitField itself)
    if options and options.onlyRequiredQuotes == true then
        outputFunc = [[
        local args, i = ...
        i = i + 1;
        if i > ]] .. #inputTable .. [[ then return nil end;
        return i, args.delimitField(args.t[i]["]] ..
        table.concat(escapedHeaders, [["]) .. ']] ..
        delimiter .. [[' .. args.delimitField(args.t[i]["]]) ..
        [["]) .. '\r\n']]
    end

    local arguments = {}
    arguments.t = inputTable
    -- we want to use the same delimitField throughout,
    -- so we're just going to pass it in
    if options and options.onlyRequiredQuotes == true then
        arguments.delimitField = generateDelimitAndQuoteField(delimiter)
    else
        arguments.delimitField = delimitField
    end

    return luaCompatibility.load(outputFunc), arguments, 0

end
|
||||
|
||||
-- Ensures every requested header exists in the first row of the input
-- table; raises an ftcsv error naming the first missing field.
local function validateHeaders(headers, inputTable)
    for index = 1, #headers do
        local field = headers[index]
        if inputTable[1][field] == nil then
            error("ftcsv: the field '" .. field .. "' doesn't exist in the inputTable")
        end
    end
end
|
||||
|
||||
-- Builds the output accumulator table with the header row already
-- rendered (CRLF-terminated) as its first entry.
local function initializeOutputWithEscapedHeaders(escapedHeaders, delimiter, options)
    local quoteEverything = not (options and options.onlyRequiredQuotes == true)

    local headerLine
    if quoteEverything then
        headerLine = '"' .. table.concat(escapedHeaders, '"' .. delimiter .. '"') .. '"\r\n'
    else
        headerLine = table.concat(escapedHeaders, delimiter) .. '\r\n'
    end

    return { headerLine }
end
|
||||
|
||||
-- Escapes each header name with the same field-escaping rules used for
-- data fields (quote-doubling; or quote-only-when-required mode).
local function escapeHeadersForOutput(headers, delimiter, options)
    local escape = delimitField
    if options and options.onlyRequiredQuotes == true then
        escape = generateDelimitAndQuoteField(delimiter)
    end

    local escapedHeaders = {}
    for index = 1, #headers do
        escapedHeaders[index] = escape(headers[index])
    end

    return escapedHeaders
end
|
||||
|
||||
-- Collects the keys of the first row and returns them sorted
-- alphabetically, giving encode() a deterministic column order.
local function extractHeadersFromTable(inputTable)
    local headers = {}
    for key in pairs(inputTable[1]) do
        headers[#headers + 1] = key
    end

    -- lets make the headers alphabetical
    table.sort(headers)

    return headers
end
|
||||
|
||||
-- Extracts the user-specified column list (options.fieldsToKeep).
-- Returns nil when no explicit list was requested.
-- Raises when fieldsToKeep is present but not a table.
local function getHeadersFromOptions(options)
    local headers = nil
    if options then
        if options.fieldsToKeep ~= nil then
            -- bug fix: the error message previously reported
            -- options.headers instead of the offending fieldsToKeep value
            assert(
                type(options.fieldsToKeep) == "table", "ftcsv only takes in a list (as a table) for the optional parameter 'fieldsToKeep'. You passed in '" .. tostring(options.fieldsToKeep) .. "' of type '" .. type(options.fieldsToKeep) .. "'.")
            headers = options.fieldsToKeep
        end
    end
    return headers
end
|
||||
|
||||
-- Validates the delimiter, resolves the header list (explicit
-- fieldsToKeep, or the sorted keys of the first row) and seeds the
-- output table with the rendered header row.
-- Returns the output table and the resolved headers.
local function initializeGenerator(inputTable, delimiter, options)
    -- delimiter MUST be exactly one character
    assert(#delimiter == 1 and type(delimiter) == "string",
        "the delimiter must be of string type and exactly one character")

    local headers = getHeadersFromOptions(options)
    if not headers then
        headers = extractHeadersFromTable(inputTable)
    end
    validateHeaders(headers, inputTable)

    local escapedHeaders = escapeHeadersForOutput(headers, delimiter, options)
    local output = initializeOutputWithEscapedHeaders(escapedHeaders, delimiter, options)
    return output, headers
end
|
||||
|
||||
-- works really quickly with luajit-2.1, because table.concat life
|
||||
-- works really quickly with luajit-2.1, because table.concat life
-- Encodes a list of row tables into one CSV string (CRLF line endings).
function ftcsv.encode(inputTable, delimiter, options)
    local delim, opts = determineArgumentOrder(delimiter, options)
    local output, headers = initializeGenerator(inputTable, delim, opts)

    -- rows are produced by a generated, compiled line formatter
    for rowNumber, line in csvLineGenerator(inputTable, delim, headers, opts) do
        output[rowNumber + 1] = line
    end

    -- combine and return final string
    return table.concat(output)
end
|
||||
|
||||
return ftcsv
|
||||
|
||||
5
home/environments/i3/conky/kat.lua
Normal file
5
home/environments/i3/conky/kat.lua
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
-- Shared configuration for the conky Lua scripts.
-- root_dir is the directory this file lives in (derived from the chunk
-- source, works with both '/' and '\' separators); exec_interval is a
-- shared interval constant in seconds.
-- NOTE: 'kat' is deliberately a global, matching its use elsewhere.
local chunk_source = debug.getinfo(1, "S").source:sub(2)
local _dir_ = chunk_source:match("(.*[/\\])") or "./"

kat = { root_dir = _dir_, exec_interval = 3600 }

return kat
|
||||
532
home/environments/i3/conky/liluat.lua
Normal file
532
home/environments/i3/conky/liluat.lua
Normal file
|
|
@ -0,0 +1,532 @@
|
|||
--[[
|
||||
-- liluat - Lightweight Lua Template engine
|
||||
--
|
||||
-- Project page: https://github.com/FSMaxB/liluat
|
||||
--
|
||||
-- liluat is based on slt2 by henix, see https://github.com/henix/slt2
|
||||
--
|
||||
-- Copyright © 2016 Max Bruckner
|
||||
-- Copyright © 2011-2016 henix
|
||||
--
|
||||
-- Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
-- of this software and associated documentation files (the "Software"), to deal
|
||||
-- in the Software without restriction, including without limitation the rights
|
||||
-- to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
-- copies of the Software, and to permit persons to whom the Software is furnished
|
||||
-- to do so, subject to the following conditions:
|
||||
--
|
||||
-- The above copyright notice and this permission notice shall be included in
|
||||
-- all copies or substantial portions of the Software.
|
||||
--
|
||||
-- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
-- IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
-- FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
-- AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
||||
-- WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
|
||||
-- IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
--]]
|
||||
|
||||
-- Module table; 'private' exposes internal helpers for the test suite.
local liluat = {
    private = {} --used to expose private functions for testing
}

-- Returns the library version string.
function liluat.version()
    return "1.2.0"
end
||||
|
||||
-- returns a string containing the fist line until the last line
|
||||
-- Returns the substring of `lines` spanning line numbers `first`
-- through `last` (inclusive, counting '\n' separators).
-- `first` below 1 is clamped to 1. For first > 1 the result starts at
-- the newline preceding that line (callers strip it).
local function string_lines(lines, first, last)
    -- allow negative line numbers
    if first < 1 then
        first = 1
    end

    local start_position
    local current_position = 1
    local line_counter = 1
    repeat
        if line_counter == first then
            start_position = current_position
        end
        current_position = lines:find('\n', current_position + 1, true)
        line_counter = line_counter + 1
    until (line_counter == last + 1) or current_position == nil

    return lines:sub(start_position, current_position)
end
|
||||
liluat.private.string_lines = string_lines
|
||||
|
||||
-- escape a string for use in lua patterns
|
||||
-- (this simply prepends all non alphanumeric characters with '%'
|
||||
-- Escapes text for literal use inside a Lua pattern by prefixing every
-- non-alphanumeric character with '%'.
local function escape_pattern(text)
    return text:gsub("([^%w])", "%%%1")
end
|
||||
liluat.private.escape_pattern = escape_pattern
|
||||
|
||||
-- recursively copy a table
|
||||
-- Deep-copies a table: nested tables are cloned recursively, all other
-- values are assigned directly.
-- Note: does not handle cyclic tables and ignores metatables.
local function clone_table(original)
    local copy = {}

    for key, value in pairs(original) do
        copy[key] = (type(value) == "table") and clone_table(value) or value
    end

    return copy
end
|
||||
liluat.private.clone_table = clone_table
|
||||
|
||||
-- recursively merge two tables, the second one has precedence
|
||||
-- if 'shallow' is set, the second table isn't copied recursively,
|
||||
-- its content is only referenced instead
|
||||
-- Recursively merges `b` over `a` into a new table; `b` wins on
-- conflicts. With `shallow`, table values coming from `b` are
-- referenced instead of deep-copied.
local function merge_tables(a, b, shallow)
    a = a or {}
    b = b or {}

    local merged = clone_table(a)

    for key, value in pairs(b) do
        local deep = (type(value) == "table") and not shallow
        if not deep then
            merged[key] = value
        elseif a[key] then
            -- both sides present: merge recursively
            merged[key] = merge_tables(a[key], value)
        else
            merged[key] = clone_table(value)
        end
    end

    return merged
end
|
||||
liluat.private.merge_tables = merge_tables
|
||||
|
||||
-- Options used when the caller doesn't override them:
-- '{{' / '}}' tag delimiters, with newline/whitespace trimming applied
-- around code chunks only.
local default_options = {
    start_tag = "{{",
    end_tag = "}}",
    trim_right = "code",
    trim_left = "code"
}

-- initialise table of options (use the provided, default otherwise)
-- Returns a fresh merged table; neither input is mutated.
local function initialise_options(options)
    return merge_tables(default_options, options)
end
|
||||
|
||||
-- creates an iterator that iterates over all chunks in the given template
|
||||
-- a chunk is either a template delimited by start_tag and end_tag or a normal text
|
||||
-- the iterator also returns the type of the chunk as second return value
|
||||
-- creates an iterator that iterates over all chunks in the given template
-- a chunk is either a template delimited by start_tag and end_tag or a normal text
-- the iterator also returns the type of the chunk as second return value
-- Each yielded chunk is a table with:
--   type      - "include", "expression", "code" or "text"
--   text      - the chunk's content (tag delimiters stripped for tags)
--   trim_left/trim_right - explicit per-tag trim overrides set by the
--                          '+' (force off) / '-' (force on) markers
local function all_chunks(template, options)
    options = initialise_options(options)

    -- pattern to match a template chunk
    local template_pattern = escape_pattern(options.start_tag) .. "([+-]?)(.-)([+-]?)" .. escape_pattern(options.end_tag)
    local include_pattern = "^"..escape_pattern(options.start_tag) .. "[+-]?include:(.-)[+-]?" .. escape_pattern(options.end_tag)
    local expression_pattern = "^"..escape_pattern(options.start_tag) .. "[+-]?=(.-)[+-]?" .. escape_pattern(options.end_tag)
    local position = 1

    return function ()
        -- position == nil signals that the template is exhausted
        if not position then
            return nil
        end

        local template_start, template_end, trim_left, template_capture, trim_right = template:find(template_pattern, position)

        local chunk = {}
        if template_start == position then -- next chunk is a template chunk
            -- '+' forces trimming off, '-' forces it on (overriding options)
            if trim_left == "+" then
                chunk.trim_left = false
            elseif trim_left == "-" then
                chunk.trim_left = true
            end
            if trim_right == "+" then
                chunk.trim_right = false
            elseif trim_right == "-" then
                chunk.trim_right = true
            end

            -- classify the tag: include takes precedence over expression
            local include_start, include_end, include_capture = template:find(include_pattern, position)
            local expression_start, expression_end, expression_capture
            if not include_start then
                expression_start, expression_end, expression_capture = template:find(expression_pattern, position)
            end

            if include_start then
                chunk.type = "include"
                chunk.text = include_capture
            elseif expression_start then
                chunk.type = "expression"
                chunk.text = expression_capture
            else
                chunk.type = "code"
                chunk.text = template_capture
            end

            position = template_end + 1
            return chunk
        elseif template_start then -- next chunk is a text chunk
            chunk.type = "text"
            chunk.text = template:sub(position, template_start - 1)
            position = template_start
            return chunk
        else -- no template chunk found --> either text chunk until end of file or no chunk at all
            chunk.text = template:sub(position)
            chunk.type = "text"
            position = nil
            return (#chunk.text > 0) and chunk or nil
        end
    end
end
|
||||
liluat.private.all_chunks = all_chunks
|
||||
|
||||
-- Reads a whole file into a string; raises if the path is nil or the
-- file cannot be opened.
local function read_entire_file(path)
    assert(path)

    local file = assert(io.open(path))
    local content = file:read('*a')
    file:close()

    return content
end
|
||||
liluat.private.read_entire_file = read_entire_file
|
||||
|
||||
-- a whitelist of allowed functions
|
||||
-- a whitelist of allowed functions
-- Default environment exposed to sandboxed template code. Only
-- side-effect-light standard functions are listed; io, load/dofile,
-- require and debug are intentionally absent, and 'os' is limited to
-- time/date queries.
local sandbox_whitelist = {
    ipairs = ipairs,
    next = next,
    pairs = pairs,
    rawequal = rawequal,
    rawget = rawget,
    rawset = rawset,
    select = select,
    tonumber = tonumber,
    tostring = tostring,
    type = type,
    unpack = unpack, -- Lua 5.1 global; nil on 5.2+ (harmless in a table)
    string = string,
    table = table,
    math = math,
    os = {
        date = os.date,
        difftime = os.difftime,
        time = os.time,
    },
    coroutine = coroutine
}
|
||||
|
||||
-- puts line numbers in front of a string and optionally highlights a single line
|
||||
-- Renders `lines` with right-aligned line numbers starting at `first`
-- (clamped to 1); the line whose number equals `highlight` gets a '>'
-- marker after its number.
local function prepend_line_numbers(lines, first, highlight)
    if not (first and first >= 1) then
        first = 1
    end

    -- drop a single empty first/last line so numbering looks clean
    lines = lines:gsub("\n$", ""):gsub("^\n", "")

    local current_line = first + 1
    local numbered = lines:gsub('\n', function ()
        local marker = (current_line == tonumber(highlight)) and '> ' or ' '
        local replacement = string.format("\n%3d:%s", current_line, marker)
        current_line = current_line + 1
        return replacement
    end)

    return string.format("%3d: ", first) .. numbered
end
|
||||
liluat.private.prepend_line_numbers = prepend_line_numbers
|
||||
|
||||
-- creates a function in a sandbox from a given code,
|
||||
-- name of the execution context and an environment
|
||||
-- that will be available inside the sandbox,
|
||||
-- optionally overwrite the whitelist
|
||||
-- creates a function in a sandbox from a given code,
-- name of the execution context and an environment
-- that will be available inside the sandbox,
-- optionally overwrite the whitelist
-- `reference` makes the environment merge shallow (values referenced,
-- not deep-copied). Raises, with a numbered code excerpt, when the
-- code fails to compile.
local function sandbox(code, name, environment, whitelist, reference)
    whitelist = whitelist or sandbox_whitelist
    name = name or 'unknown'

    -- prepare the environment
    environment = merge_tables(whitelist, environment, reference)

    local func
    local error_message
    if setfenv then --Lua 5.1 and compatible
        -- loadstring accepts precompiled chunks, which could escape the
        -- sandbox, so reject bytecode (ESC prefix byte 27) explicitly
        if code:byte(1) == 27 then
            error("Lua bytecode not permitted.", 2)
        end
        func, error_message = loadstring(code)
        if func then
            setfenv(func, environment)
        end
    else -- Lua 5.2 and later
        -- mode 't' restricts load to text chunks (no bytecode)
        func, error_message = load(code, name, 't', environment)
    end

    -- handle compile error and print pretty error message
    if not func then
        local line_number, message = error_message:match(":(%d+):(.*)")
        -- lines before and after the error
        local lines = string_lines(code, line_number - 3, line_number + 3)
        error(
            'Syntax error in sandboxed code "' .. name .. '" in line ' .. line_number .. ':\n'
            .. message .. '\n\n'
            .. prepend_line_numbers(lines, line_number - 3, line_number),
            3
        )
    end

    return func
end
|
||||
liluat.private.sandbox = sandbox
|
||||
|
||||
-- Evaluates a Lua string literal (e.g. '"foo.html"') in an empty
-- sandbox and returns the resulting value.
local function parse_string_literal(string_literal)
    local evaluate = sandbox('return' .. string_literal, nil, nil, {})
    return evaluate()
end
|
||||
liluat.private.parse_string_literal = parse_string_literal
|
||||
|
||||
-- add an include to the include_list and throw an error if
|
||||
-- an inclusion cycle is detected
|
||||
-- Records `path` as a child node in the inclusion tree and raises if
-- the same path already appears among the ancestors (a cyclic include).
-- Each node links back to its parent via index [0].
local function add_include_and_detect_cycles(include_list, path)
    local ancestor = include_list[0]
    while ancestor do -- walk up until the root is reached
        if ancestor[path] then
            error("Cyclic inclusion detected")
        end
        ancestor = ancestor[0]
    end

    include_list[path] = {
        [0] = include_list
    }
end
|
||||
liluat.private.add_include_and_detect_cycles = add_include_and_detect_cycles
|
||||
|
||||
-- extract the name of a directory from a path
|
||||
-- Returns the directory portion of a path including the trailing '/',
-- or "" when the path has no directory component.
local function dirname(path)
    local directory = path:match("^(.*/).-$")
    return directory or ""
end
|
||||
liluat.private.dirname = dirname
|
||||
|
||||
-- splits a template into chunks
|
||||
-- chunks are either a template delimited by start_tag and end_tag
|
||||
-- or a text chunk (everything else)
|
||||
-- @return table
|
||||
-- splits a template into chunks
-- chunks are either a template delimited by start_tag and end_tag
-- or a text chunk (everything else)
-- @return table
-- Recursively inlines {{include:"path"}} directives:
--   output       - accumulator table (created when nil)
--   include_list - inclusion tree used for cycle detection
--   current_path - path of the template being parsed; relative include
--                  paths resolve against its directory unless
--                  options.base_path is set
local function parse(template, options, output, include_list, current_path)
    options = initialise_options(options)
    current_path = current_path or "." -- current include path

    include_list = include_list or {} -- a list of files that were included
    local output = output or {}

    for chunk in all_chunks(template, options) do
        -- handle includes
        if chunk.type == "include" then -- include chunk
            local include_path_literal = chunk.text
            local path = parse_string_literal(include_path_literal)

            -- build complete path
            if path:find("^/") then
                --absolute path, don't modify
            elseif options.base_path then
                path = options.base_path .. "/" .. path
            else
                path = dirname(current_path) .. path
            end

            -- raises on cyclic inclusion before reading the file
            add_include_and_detect_cycles(include_list, path)

            local included_template = read_entire_file(path)
            parse(included_template, options, output, include_list[path], path)
        elseif (chunk.type == "text") and output[#output] and (output[#output].type == "text") then
            -- ensure that no two text chunks follow each other
            output[#output].text = output[#output].text .. chunk.text
        else -- other chunk
            table.insert(output, chunk)
        end

    end

    return output
end
|
||||
liluat.private.parse = parse
|
||||
|
||||
-- inline included template files
|
||||
-- @return string
|
||||
-- inline included template files
-- @return string
-- Returns the template text with all include directives replaced by the
-- (recursively inlined) contents of the referenced files; expression
-- and code tags are re-emitted with the configured delimiters.
function liluat.inline(template, options, start_path)
    options = initialise_options(options)

    local pieces = {}
    for _, chunk in ipairs(parse(template, options, nil, nil, start_path)) do
        if chunk.type == "expression" then
            pieces[#pieces + 1] = options.start_tag .. "=" .. chunk.text .. options.end_tag
        elseif chunk.type == "code" then
            pieces[#pieces + 1] = options.start_tag .. chunk.text .. options.end_tag
        else
            pieces[#pieces + 1] = chunk.text
        end
    end

    return table.concat(pieces)
end
|
||||
|
||||
-- @return { string }
|
||||
-- @return { string }
-- Returns the list of all file paths (transitively) included by the
-- template; each path is listed at most once.
function liluat.get_dependencies(template, options, start_path)
    options = initialise_options(options)

    local include_list = {}
    parse(template, options, nil, include_list, start_path)

    local dependencies = {}
    local seen = {} -- paths already added

    -- the include tree uses string keys for paths; [0] is the parent link
    local function walk(node)
        for key, value in pairs(node) do
            if type(key) == "string" and not seen[key] then
                seen[key] = true
                dependencies[#dependencies + 1] = key
                walk(value)
            end
        end
    end

    walk(include_list)
    return dependencies
end
|
||||
|
||||
-- compile a template into lua code
|
||||
-- @return { name = string, code = string / function}
|
||||
-- compile a template into lua code
-- @return { name = string, code = string / function}
-- Expressions become calls to the output function, code chunks are
-- pasted verbatim, and text chunks are emitted as quoted strings after
-- whitespace trimming is applied according to per-tag '+'/'-' markers
-- and the trim_left/trim_right options.
function liluat.compile(template, options, template_name, start_path)
    options = initialise_options(options)
    template_name = template_name or 'liluat.compile'

    -- name of the function the generated code calls to emit output
    local output_function = "__liluat_output_function"

    -- split the template string into chunks
    local lexed_template = parse(template, options, nil, nil, start_path)

    -- table of code fragments the template is compiled into
    local lua_code = {}

    for i, chunk in ipairs(lexed_template) do
        -- check if the chunk is a template (either code or expression)
        if chunk.type == "expression" then
            table.insert(lua_code, output_function..'('..chunk.text..')')
        elseif chunk.type == "code" then
            table.insert(lua_code, chunk.text)
        else --text chunk
            -- determine if this block needs to be trimmed right
            -- (strip newline)
            -- per-tag markers on the PREVIOUS chunk override the option
            local trim_right = false
            if lexed_template[i - 1] and (lexed_template[i - 1].trim_right == true) then
                trim_right = true
            elseif lexed_template[i - 1] and (lexed_template[i - 1].trim_right == false) then
                trim_right = false
            elseif options.trim_right == "all" then
                trim_right = true
            elseif options.trim_right == "code" then
                trim_right = lexed_template[i - 1] and (lexed_template[i - 1].type == "code")
            elseif options.trim_right == "expression" then
                trim_right = lexed_template[i - 1] and (lexed_template[i - 1].type == "expression")
            end

            -- determine if this block needs to be trimmed left
            -- (strip whitespaces in front)
            -- per-tag markers on the NEXT chunk override the option
            local trim_left = false
            if lexed_template[i + 1] and (lexed_template[i + 1].trim_left == true) then
                trim_left = true
            elseif lexed_template[i + 1] and (lexed_template[i + 1].trim_left == false) then
                trim_left = false
            elseif options.trim_left == "all" then
                trim_left = true
            elseif options.trim_left == "code" then
                trim_left = lexed_template[i + 1] and (lexed_template[i + 1].type == "code")
            elseif options.trim_left == "expression" then
                trim_left = lexed_template[i + 1] and (lexed_template[i + 1].type == "expression")
            end

            if trim_right and trim_left then
                -- both at once
                if i == 1 then
                    if chunk.text:find("^.*\n") then
                        chunk.text = chunk.text:match("^(.*\n)%s-$")
                    elseif chunk.text:find("^%s-$") then
                        chunk.text = ""
                    end
                elseif chunk.text:find("^\n") then --have to trim a newline
                    if chunk.text:find("^\n.*\n") then --at least two newlines
                        chunk.text = chunk.text:match("^\n(.*\n)%s-$") or chunk.text:match("^\n(.*)$")
                    elseif chunk.text:find("^\n%s-$") then
                        chunk.text = ""
                    else
                        chunk.text = chunk.text:gsub("^\n", "")
                    end
                else
                    chunk.text = chunk.text:match("^(.*\n)%s-$") or chunk.text
                end
            elseif trim_left then
                if i == 1 and chunk.text:find("^%s-$") then
                    chunk.text = ""
                else
                    chunk.text = chunk.text:match("^(.*\n)%s-$") or chunk.text
                end
            elseif trim_right then
                chunk.text = chunk.text:gsub("^\n", "")
            end
            -- only emit non-empty text chunks; %q safely quotes the text
            if not (chunk.text == "") then
                table.insert(lua_code, output_function..'('..string.format("%q", chunk.text)..')')
            end
        end
    end

    return {
        name = template_name,
        code = table.concat(lua_code, '\n')
    }
end
|
||||
|
||||
-- compile a file
|
||||
-- @return { name = string, code = string / function }
|
||||
-- compile a file
-- @return { name = string, code = string / function }
-- The file name doubles as the template name and as the base path for
-- resolving relative includes.
function liluat.compile_file(filename, options)
    local template = read_entire_file(filename)
    return liluat.compile(template, options, filename, filename)
end
|
||||
|
||||
-- @return a coroutine function
|
||||
-- @return a coroutine function
-- Returns a sandboxed function whose output function is coroutine.yield,
-- so wrapping it in a coroutine yields one rendered fragment per resume.
function liluat.render_coroutine(template, environment, options)
    options = initialise_options(options)
    environment = merge_tables({ __liluat_output_function = coroutine.yield }, environment, options.reference)
    return sandbox(template.code, template.name, environment, nil, options.reference)
end
|
||||
|
||||
-- @return string
|
||||
-- @return string
-- Renders a compiled template `t` (as returned by liluat.compile) with
-- environment `env` and returns the concatenated output. Runtime errors
-- inside the template are re-raised with a numbered excerpt of the
-- generated code around the failing line.
function liluat.render(t, env, options)
    options = initialise_options(options)

    local result = {}

    -- add closure that renders the text into the result table
    env = merge_tables({
            __liluat_output_function = function (text)
                table.insert(result, text) end
        },
        env,
        options.reference
    )

    -- compile and run the lua code
    local render_function = sandbox(t.code, t.name, env, nil, options.reference)
    local status, error_message = pcall(render_function)
    if not status then
        local line_number, message = error_message:match(":(%d+):(.*)")
        -- lines before and after the error
        local lines = string_lines(t.code, line_number - 3, line_number + 3)
        error(
            'Runtime error in sandboxed code "' .. t.name .. '" in line ' .. line_number .. ':\n'
            .. message .. '\n\n'
            .. prepend_line_numbers(lines, line_number - 3, line_number),
            2
        )
    end

    return table.concat(result)
end
|
||||
|
||||
return liluat
|
||||
|
|
@ -17,30 +17,25 @@ in {
|
|||
xclip
|
||||
];
|
||||
services.i3gopher.enable = true;
|
||||
xsession.windowManager.i3 = {
|
||||
enable = true;
|
||||
extraConfig = ''
|
||||
workspace 1 output DP-2
|
||||
workspace 11 output HDMI-0
|
||||
for_window [class="^steam_app_default$"] floating enable
|
||||
'';
|
||||
config = let
|
||||
xsession.windowManager.i3 = let
|
||||
modifier = "Mod4";
|
||||
other_modifier = "Mod1";
|
||||
mod = modifier;
|
||||
mod2 = other_modifier;
|
||||
|
||||
runCommand = "${config.programs.rofi.finalPackage}/bin/rofi -show combi -modes combi";
|
||||
workspaceNames = {
|
||||
"1" = "";
|
||||
"2" = "";
|
||||
"11" = "";
|
||||
"12" = "";
|
||||
"13" = "";
|
||||
"1" = " Term";
|
||||
"2" = " GW2";
|
||||
"3" = " GW1";
|
||||
"4" = " Web";
|
||||
"11" = " IM";
|
||||
"12" = " Web";
|
||||
"13" = " Media";
|
||||
"14" = " Music";
|
||||
};
|
||||
workspaceNamer = num: let
|
||||
numStr = builtins.toString num;
|
||||
in if numStr ? workspaceNames then "${numStr}:${numStr} ${workspaceNames.numStr}" else "${numStr}:${numStr}";
|
||||
in if workspaceNames ? ${numStr} then "${numStr}:${workspaceNames.${numStr}}" else "${numStr}:${numStr}";
|
||||
|
||||
lockCommand = "sh -c '${pkgs.i3lock-fancy-rapid}/bin/i3lock 5 3 & sleep 5 && xset dpms force off'";
|
||||
|
||||
|
|
@ -49,6 +44,32 @@ in {
|
|||
gapsOuterMode = "Outer Gaps: +|-|0 (local), Shift + +|-|0 (global)";
|
||||
gapsInnerMode = "Inner Gaps: +|-|0 (local), Shift + +|-|0 (global)";
|
||||
in {
|
||||
enable = true;
|
||||
extraConfig = ''
|
||||
workspace 1 output DP-2 gaps inner 10
|
||||
workspace 2 output DP-2
|
||||
workspace 3 output DP-2
|
||||
workspace 4 output DP-2
|
||||
workspace 5 output DP-2
|
||||
workspace 5 output DP-2
|
||||
workspace 6 output DP-2
|
||||
workspace 7 output DP-2
|
||||
workspace 8 output DP-2
|
||||
workspace 9 output DP-2
|
||||
workspace 0 output DP-2
|
||||
workspace 11 output HDMI-0
|
||||
workspace 12 output HDMI-0
|
||||
workspace 13 output HDMI-0
|
||||
workspace 14 output HDMI-0
|
||||
workspace 15 output HDMI-0
|
||||
workspace 16 output HDMI-0
|
||||
workspace 17 output HDMI-0
|
||||
workspace 18 output HDMI-0
|
||||
workspace 19 output HDMI-0
|
||||
workspace 20 output HDMI-0
|
||||
for_window [class="^steam_app_default$"] floating enable
|
||||
'';
|
||||
config = {
|
||||
inherit modifier;
|
||||
fonts = {
|
||||
size = 10.0;
|
||||
|
|
@ -100,12 +121,24 @@ in {
|
|||
in mkMerge (map mapDefaultAttrs ([ normalBindings ] ++ workspaceBindings));
|
||||
|
||||
assigns = {
|
||||
/*${workspaceNamer 2} = [
|
||||
${workspaceNamer 2} = [
|
||||
{
|
||||
class = "^steam_app_default$";
|
||||
title = "^Guild Wars 2$";
|
||||
}
|
||||
];*/
|
||||
${workspaceNamer 13} = [
|
||||
];
|
||||
${workspaceNamer 3} = [
|
||||
{
|
||||
class = "^steam_app_default$";
|
||||
title = "^Guild Wars$";
|
||||
}
|
||||
];
|
||||
${workspaceNamer 11} = [
|
||||
{
|
||||
class = "^Discord$";
|
||||
}
|
||||
];
|
||||
${workspaceNamer 14} = [
|
||||
{
|
||||
class = "^Spotify$";
|
||||
}
|
||||
|
|
@ -141,8 +174,8 @@ in {
|
|||
${gapsMode} =
|
||||
defaultPath
|
||||
// {
|
||||
"o" = "mode '${gapsOuterMode}'";
|
||||
"i" = "mode '${gapsInnerMode}'";
|
||||
"o" = ''mode "${gapsOuterMode}"'';
|
||||
"i" = ''mode "${gapsInnerMode}"'';
|
||||
};
|
||||
${actionMode} =
|
||||
defaultPath
|
||||
|
|
@ -218,7 +251,7 @@ in {
|
|||
"FontAwesome 6 Free"
|
||||
"FontAwesome 6 Brands"
|
||||
];
|
||||
size = 9.0;
|
||||
size = 10.0;
|
||||
};
|
||||
colors = {
|
||||
background = "$base00";
|
||||
|
|
|
|||
|
|
@ -1,9 +1,18 @@
|
|||
_: {
|
||||
{ pkgs, ... }: {
|
||||
programs.i3status-rust = {
|
||||
enable = true;
|
||||
bars = {
|
||||
# YOU! I WANNA TAKE YOU TO A
|
||||
gaybar = {
|
||||
settings = {
|
||||
icons = {
|
||||
icons = "awesome6";
|
||||
overrides = {
|
||||
caffeine_on = "";
|
||||
caffeine_off = "";
|
||||
};
|
||||
};
|
||||
};
|
||||
blocks = [
|
||||
{
|
||||
block = "cpu";
|
||||
|
|
@ -41,6 +50,16 @@ _: {
|
|||
block = "notify";
|
||||
format = " $icon {($notification_count.eng(w:1)) |}";
|
||||
}
|
||||
{
|
||||
block = "toggle";
|
||||
command_on = "${pkgs.xorg.xset}/bin/xset -dpms";
|
||||
command_off = "${pkgs.xorg.xset}/bin/xset +dpms";
|
||||
format = " $icon DPMS ";
|
||||
command_state = ''${pkgs.xorg.xset}/bin/xset q | ${pkgs.gnugrep}/bin/grep -F "DPMS is Disabled"'';
|
||||
icon_on = "caffeine_on";
|
||||
icon_off = "caffeine_off";
|
||||
state_on = "info";
|
||||
}
|
||||
{
|
||||
block = "time";
|
||||
interval = 1;
|
||||
|
|
@ -48,8 +67,7 @@ _: {
|
|||
}
|
||||
];
|
||||
theme = "ctp-latte";
|
||||
icons = "awesome6";
|
||||
};
|
||||
};
|
||||
};
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -7,6 +7,7 @@
|
|||
in {
|
||||
home.packages = with pkgs; [
|
||||
discord
|
||||
betterdiscordctl
|
||||
];
|
||||
xdg.configFile."discord/settings.json".text = toJSON {} {
|
||||
"SKIP_HOST_UPDATE" = true;
|
||||
|
|
|
|||
|
|
@ -9,7 +9,8 @@
|
|||
main = {
|
||||
id = 0;
|
||||
isDefault = true;
|
||||
extensions.packages = with nur.repos.rycee.firefox-addons; [
|
||||
extensions = {
|
||||
packages = with nur.repos.rycee.firefox-addons; [
|
||||
sponsorblock
|
||||
link-cleaner
|
||||
canvasblocker
|
||||
|
|
@ -32,8 +33,14 @@
|
|||
multi-account-containers
|
||||
dearrow
|
||||
ublock-origin
|
||||
betterttv
|
||||
violentmonkey
|
||||
return-youtube-dislikes
|
||||
iina-open-in-mpv
|
||||
];
|
||||
settings = {
|
||||
};
|
||||
};
|
||||
settings = {
|
||||
# Derived from https://github.com/arcnmx/home/blob/9eb1cd4dd43883e1a0c6a2a55c00d7c3bede1776/cfg/firefox/default.nix#L7
|
||||
# and https://git.ztn.sh/zotan/snowleopard/src/branch/dev/assets/prefs.js
|
||||
|
|
|
|||
|
|
@ -32,6 +32,8 @@
|
|||
v4l-utils # Webcam
|
||||
obsidian
|
||||
remmina
|
||||
alsa-utils
|
||||
pwvucontrol
|
||||
veracrypt
|
||||
gimp
|
||||
];
|
||||
|
|
|
|||
|
|
@ -20,7 +20,7 @@
|
|||
];
|
||||
initialize = true;
|
||||
passwordFile = config.sops.secrets.restic-password-file.path;
|
||||
repository = "sftp:u401227@u401227.your-storagebox.de:/restic/koishi";
|
||||
repository = "sftp:u401227@u401227.your-storagebox.de:/restic/${config.networking.hostName}";
|
||||
timerConfig = {
|
||||
OnCalendar = "00:05";
|
||||
RandomizedDelaySec = "5h";
|
||||
|
|
|
|||
|
|
@ -95,6 +95,8 @@ _: let
|
|||
|
||||
environment.systemPackages = with pkgs; [
|
||||
ledfx
|
||||
openrgb
|
||||
nvtopPackages.nvidia
|
||||
];
|
||||
system.stateVersion = "21.11";
|
||||
};
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue