2019-08-15 19:43:55 +00:00
|
|
|
<?php
|
|
|
|
|
2021-08-12 08:04:14 +00:00
|
|
|
/**
 * Resolve the on-disk storage directory for a mailbox address.
 *
 * @param string $email mailbox address (also the directory name under data/)
 * @return string|false absolute path to the mailbox directory, or false if it does not exist
 */
function getDirForEmail($email)
{
    // basename() strips any path components, so a crafted address such as
    // "../../etc" cannot traverse outside the data directory. Legitimate
    // email addresses contain no slashes, so this is a no-op for them.
    return realpath(ROOT.DS.'..'.DS.'data'.DS.basename($email));
}
|
|
|
|
|
2019-08-15 19:43:55 +00:00
|
|
|
/**
 * Check whether $haystack begins with $needle.
 * An empty $needle matches every haystack.
 *
 * @param string $haystack string to inspect
 * @param string $needle   prefix to look for
 * @return bool
 */
function startsWith($haystack, $needle)
{
    // Compare only the first strlen($needle) bytes of both strings.
    return strncmp($haystack, $needle, strlen($needle)) === 0;
}
|
|
|
|
|
|
|
|
/**
 * Check whether $haystack ends with $needle.
 * An empty $needle matches every haystack.
 *
 * @param string $haystack string to inspect
 * @param string $needle   suffix to look for
 * @return bool
 */
function endsWith($haystack, $needle)
{
    $suffixLen = strlen($needle);

    // Empty suffix always matches.
    if ($suffixLen === 0) {
        return true;
    }

    // Compare the trailing $suffixLen bytes against the needle.
    $tail = substr($haystack, -$suffixLen);
    return $tail === $needle;
}
|
|
|
|
|
|
|
|
/**
 * Load one stored email for an address, decoded from its JSON file.
 *
 * @param string $email mailbox address
 * @param string $id    mail id (the JSON filename without extension)
 * @return array|null decoded mail structure, or null if the file is missing/invalid
 */
function getEmail($email,$id)
{
    $path = getDirForEmail($email).DS.$id.'.json';
    return json_decode(file_get_contents($path), true);
}
|
|
|
|
|
2022-02-08 23:17:55 +00:00
|
|
|
/**
 * Return the raw (unparsed) message source of a stored email.
 *
 * @param string $email mailbox address
 * @param string $id    mail id (the JSON filename without extension)
 * @return string the 'raw' field of the stored JSON document
 */
function getRawEmail($email,$id)
{
    $path = getDirForEmail($email).DS.$id.'.json';
    $data = json_decode(file_get_contents($path), true);
    return $data['raw'];
}
|
|
|
|
|
2019-08-15 19:43:55 +00:00
|
|
|
/**
 * Check whether a stored email with the given id exists for an address.
 *
 * @param string $email mailbox address
 * @param string $id    mail id (the JSON filename without extension)
 * @return bool
 */
function emailIDExists($email,$id)
{
    $path = getDirForEmail($email).DS.$id.'.json';
    return file_exists($path);
}
|
|
|
|
|
2023-11-11 15:41:14 +00:00
|
|
|
/**
 * List all stored emails of an address, keyed and sorted by mail id (timestamp).
 * If the address is the configured ADMIN address, the combined inbox of every
 * known address is returned instead.
 *
 * @param string $email              mailbox address to list
 * @param bool   $includebody        also include the parsed body in each entry
 * @param bool   $includeattachments also include attachment download URLs
 * @return array map of mail id => summary array (email, id, from, subject, md5, maillen, ...)
 */
function getEmailsOfEmail($email,$includebody=false,$includeattachments=false)
{
    $o = [];
    $settings = loadSettings();

    // !empty() avoids a notice when no ADMIN key is configured; loadSettings()
    // may also return false when no config file exists.
    if(is_array($settings) && !empty($settings['ADMIN']) && $settings['ADMIN']==$email)
    {
        // Admin sees the combined inbox of every known address.
        foreach(listEmailAdresses() as $address)
            collectEmailsOfAddress($address, $settings, $includebody, $includeattachments, $o);
    }
    else
    {
        collectEmailsOfAddress($email, $settings, $includebody, $includeattachments, $o);
    }

    // Sort chronologically by mail id (the ids are timestamps).
    ksort($o);
    return $o;
}

/**
 * Helper for getEmailsOfEmail(): scan one mailbox directory and append a
 * summary entry for every *.json mail file into $o, keyed by mail id.
 *
 * @param string $email              mailbox address whose directory is scanned
 * @param array  $settings           loaded settings (URL used for attachment links)
 * @param bool   $includebody        include parsed body in each entry
 * @param bool   $includeattachments include attachment download URLs
 * @param array  $o                  accumulator, modified in place
 * @return void
 */
function collectEmailsOfAddress($email, $settings, $includebody, $includeattachments, &$o)
{
    $handle = opendir(getDirForEmail($email));
    if (!$handle)
        return;

    while (false !== ($entry = readdir($handle))) {
        if (!endsWith($entry,'.json'))
            continue;

        // Mail id is the filename without the ".json" suffix.
        $time = substr($entry,0,-5);
        $json = json_decode(file_get_contents(getDirForEmail($email).DS.$entry),true);

        $o[$time] = array(
            'email'=>$email,
            'id'=>$time,
            'from'=>$json['parsed']['from'],
            'subject'=>$json['parsed']['subject'],
            'md5'=>md5($time.$json['raw']),
            'maillen'=>strlen($json['raw'])
        );

        if($includebody)
            $o[$time]['body'] = $json['parsed']['body'];

        if($includeattachments)
        {
            $o[$time]['attachments'] = $json['parsed']['attachments'];
            // Rewrite stored attachment ids into absolute download URLs.
            foreach($o[$time]['attachments'] as $k=>$v)
                $o[$time]['attachments'][$k] = $settings['URL'].'/api/attachment/'.$email.'/'.$v;
        }
    }

    closedir($handle);
}
|
|
|
|
|
2019-08-18 19:02:04 +00:00
|
|
|
/**
 * List every known mailbox address by scanning the data directory.
 * Only directory entries that validate as email addresses are returned.
 *
 * @return array list of email address strings
 */
function listEmailAdresses()
{
    $addresses = array();

    $handle = opendir(ROOT.DS.'..'.DS.'data'.DS);
    if ($handle) {
        while (($entry = readdir($handle)) !== false) {
            // Skip ".", ".." and anything that is not an email-shaped name.
            if (filter_var($entry, FILTER_VALIDATE_EMAIL))
                $addresses[] = $entry;
        }
        closedir($handle);
    }

    return $addresses;
}
|
|
|
|
|
2023-11-21 21:33:44 +00:00
|
|
|
/**
 * Check whether an attachment file exists for a stored email.
 *
 * @param string       $email      mailbox address
 * @param string       $id         mail id
 * @param string|false $attachment attachment name; when false, only the bare id path is checked
 * @return bool
 */
function attachmentExists($email,$id,$attachment=false)
{
    $path = getDirForEmail($email).DS.'attachments'.DS.$id;
    // Attachment files are stored as "<id>-<name>".
    if ($attachment)
        $path .= '-'.$attachment;
    return file_exists($path);
}
|
|
|
|
|
2022-01-23 23:04:45 +00:00
|
|
|
/**
 * List the attachment names recorded in a stored email's JSON document.
 *
 * @param string $email mailbox address
 * @param string $id    mail id
 * @return array attachment names; empty array when none are recorded
 */
function listAttachmentsOfMailID($email,$id)
{
    $data = json_decode(file_get_contents(getDirForEmail($email).DS.$id.'.json'),true);
    $attachments = $data['parsed']['attachments'];

    // Guard against missing/NULL attachment lists in older mail files.
    return is_array($attachments) ? $attachments : [];
}
|
|
|
|
|
2022-02-08 23:38:45 +00:00
|
|
|
/**
 * Delete a stored email together with all of its attachment files.
 *
 * @param string $email mailbox address
 * @param string $id    mail id
 * @return bool result of unlinking the mail's JSON file
 */
function deleteEmail($email,$id)
{
    $dir = getDirForEmail($email);

    // Remove attachment files first, then the mail document itself.
    foreach(listAttachmentsOfMailID($email,$id) as $attachment)
        unlink($dir.DS.'attachments'.DS.$attachment);

    return unlink($dir.DS.$id.'.json');
}
|
|
|
|
|
2022-01-23 23:04:45 +00:00
|
|
|
|
2019-08-15 19:43:55 +00:00
|
|
|
/**
 * Load the application settings from config.ini.
 *
 * @return array|false parsed settings, or false when no config file exists
 */
function loadSettings()
{
    $ini = ROOT.DS.'..'.DS.'config.ini';

    // false signals "not configured yet" to callers.
    return file_exists($ini) ? parse_ini_file($ini) : false;
}
|
2023-11-08 18:24:48 +00:00
|
|
|
|
|
|
|
|
|
|
|
/**
 * HTML-escape a string for safe output in an HTML context (UTF-8).
 *
 * ENT_QUOTES escapes both single and double quotes; ENT_SUBSTITUTE replaces
 * invalid UTF-8 byte sequences with U+FFFD instead of htmlspecialchars()
 * silently returning an empty string for the entire input.
 *
 * @param string $str untrusted text
 * @return string escaped text
 */
function escape($str)
{
    return htmlspecialchars($str, ENT_QUOTES | ENT_SUBSTITUTE, 'UTF-8');
}
|
|
|
|
|
|
|
|
/**
 * Render a flat array as an HTML unordered list.
 * Values are interpolated as-is (caller is responsible for escaping).
 *
 * @param array $array values to list
 * @return string "<ul><li>...</li>...</ul>" markup
 */
function array2ul($array)
{
    $items = '';
    foreach ($array as $elem) {
        $items .= "<li>$elem</li>";
    }
    return "<ul>{$items}</ul>";
}
|
|
|
|
|
2023-11-18 09:16:59 +00:00
|
|
|
/**
 * Return the last $lines lines of a file via the system `tail` binary.
 *
 * @param string $filepath file to read
 * @param int    $lines    number of trailing lines to return
 * @return string trimmed tail output
 */
function tailShell($filepath, $lines = 1) {
    ob_start();
    // (int) cast prevents shell injection through $lines (it was previously
    // concatenated into the command unescaped); escapeshellarg() covers the
    // path. `tail -n N` is the POSIX spelling of the legacy `tail -N`.
    passthru('tail -n ' . (int) $lines . ' ' . escapeshellarg($filepath));
    return trim(ob_get_clean());
}
|
|
|
|
|
2023-11-22 11:26:09 +00:00
|
|
|
/**
 * Best-effort detection of the client IP behind Cloudflare / proxies.
 *
 * NOTE(review): every header here except REMOTE_ADDR is client-supplied and
 * spoofable — do not use the result for authentication decisions.
 *
 * @return string|null detected IP address (null only if REMOTE_ADDR is unset, e.g. CLI)
 */
function getUserIP()
{
    // Cloudflare passes the original client address in this header.
    if (!empty($_SERVER['HTTP_CF_CONNECTING_IP']))
        return $_SERVER['HTTP_CF_CONNECTING_IP'];

    // isset() guards avoid undefined-index notices that the old @-free reads produced.
    $client  = isset($_SERVER['HTTP_CLIENT_IP']) ? $_SERVER['HTTP_CLIENT_IP'] : null;
    $forward = isset($_SERVER['HTTP_X_FORWARDED_FOR']) ? $_SERVER['HTTP_X_FORWARDED_FOR'] : null;
    $remote  = isset($_SERVER['REMOTE_ADDR']) ? $_SERVER['REMOTE_ADDR'] : null;

    // X-Forwarded-For may be "client, proxy1, proxy2" — the first entry is the
    // client. Compare strpos() against false explicitly (a comma at position 0
    // is falsy and was previously missed).
    if ($forward !== null && strpos($forward, ',') !== false) {
        $parts = explode(',', $forward);
        $forward = trim($parts[0]);
    }

    if (filter_var($forward, FILTER_VALIDATE_IP)) {
        return $forward;
    }
    if (filter_var($client, FILTER_VALIDATE_IP)) {
        return $client;
    }
    return $remote;
}
|
|
|
|
|
|
|
|
/**
 * Check if a given IPv4 or IPv6 address is inside a network range.
 *
 * @param string $ip    IP to check, e.g. "127.0.0.1" or "2001:db8::1"
 * @param string $range IP/CIDR netmask e.g. "127.0.0.0/24" or
 *                      "2001:db8::8a2e:370:7334/128", a bare IP (treated as
 *                      an exact-host range), or a comma-separated list of either
 * @return boolean true if the ip is in this range / false if not
 * @throws InvalidArgumentException when either address cannot be parsed
 * via https://stackoverflow.com/a/56050595/1174516
 */
function isIPInRange( $ip, $range ) {

    if (strpos($range, ',') !== false) {
        // We got a list of ranges: the IP matches if it is in any of them.
        $ranges = array_map('trim', explode(',', $range));
        foreach ($ranges as $range) {
            if (isIPInRange($ip, $range)) return true;
        }
        return false;
    }

    // A bare IP without "/" means an exact-host range. Previously $maskBits
    // ended up null here, producing an all-zero mask that matched EVERYTHING.
    if (strpos($range, '/') === false) {
        $range .= (strpos($range, ':') === false) ? '/32' : '/128';
    }

    // Get mask bits
    list($net, $maskBits) = explode('/', $range);

    // Address family size in bytes (IPv4 = 4, IPv6 = 16)
    $size = (strpos($ip, ':') === false) ? 4 : 16;

    // Convert to packed binary
    $ip = inet_pton($ip);
    $net = inet_pton($net);
    if (!$ip || !$net) {
        throw new InvalidArgumentException('Invalid IP address');
    }

    // Mixed IPv4/IPv6 comparisons can never match; the old code &-ed
    // different-length binary strings, silently truncating one side.
    if (strlen($ip) !== strlen($net)) {
        return false;
    }

    // Build a binary mask of $maskBits leading one-bits: whole 0xFF bytes
    // first, then one partial byte, then zero-padding to the address size.
    $solid = (int) floor($maskBits / 8);
    $solidBits = $solid * 8;
    $mask = str_repeat(chr(255), $solid);
    for ($i = $solidBits; $i < $maskBits; $i += 8) {
        $bits = max(0, min(8, $maskBits - $i));
        $mask .= chr((pow(2, $bits) - 1) << (8 - $bits));
    }
    $mask = str_pad($mask, $size, chr(0));

    // In range iff the masked network bits agree.
    return ($ip & $mask) === ($net & $mask);
}
|
|
|
|
|
2023-11-23 15:05:50 +00:00
|
|
|
/**
 * Read the application version from the VERSION file.
 *
 * @return string trimmed version string, or '' when no VERSION file exists
 */
function getVersion()
{
    $file = ROOT.DS.'..'.DS.'VERSION';

    if (!file_exists($file)) {
        return '';
    }
    return trim(file_get_contents($file));
}
|
|
|
|
|
2023-11-09 22:21:17 +00:00
|
|
|
function generateRandomEmail()
|
|
|
|
{
|
|
|
|
$nouns = ["aardvark","abyssinian","accelerator","accordion","account","accountant","acknowledgment","acoustic","acrylic","act","action","activity","actor","actress","adapter","addition","address","adjustment","adult","advantage","advertisement","aftermath","afternoon","aftershave","afterthought","age","agenda","agreement","air","airbus","airmail","airplane","airport","airship","alarm","albatross","alcohol","algebra","algeria","alibi","alley","alligator","alloy","almanac","alphabet","alto","aluminium","aluminum","ambulance","america","amount","amusement","anatomy","anethesiologist","anger","angle","angora","animal","anime","ankle","answer","ant","anteater","antelope","anthony","anthropology","apartment","apology","apparatus","apparel","appeal","appendix","apple","appliance","approval","april","aquarius","arch","archaeology","archeology","archer","architecture","area","argentina","argument","aries","arithmetic","arm","armadillo","armchair","army","arrow","art","ash","ashtray","asia","asparagus","asphalt","asterisk","astronomy","athlete","ATM","atom","attack","attempt","attention","attic","attraction","august","aunt","australia","australian","author","authority","authorization","avenue","baboon","baby","back","backbone","bacon","badge","badger","bag","bagel","bagpipe","bail","bait","baker","bakery","balance","balinese","ball","balloon","bamboo","banana","band","bandana","bangle","banjo","bank","bankbook","banker","bar","barbara","barber","barge","baritone","barometer","base","baseball","basement","basin","basket","basketball","bass","bassoon","bat","bath","bathroom","bathtub","battery","battle","bay","beach","bead","beam","bean","bear","beard","beast","beat","beautician","beauty","beaver","bed","bedroom","bee","beech","beef","beer","beet","beetle","beggar","beginner","begonia","behavior","belgian","belief","bell","belt","bench","bengal","beret","berry","bestseller","betty","bibliography","bicycle","bike","bill","billboard","biology","biplane","birch","bird","birth","bi
rthday","bit","bite","black","bladder","blade","blanket","blinker","blizzard","block","blouse","blow","blowgun","blue","board","boat","bobcat","body","bolt","bomb","bomber","bone","bongo","bonsai","book","bookcase","booklet","boot","border","botany","bottle","bottom","boundary","bow","bowl","box","boy","bra","brace","bracket","brain","brake","branch","brand","brandy","brass","brazil","bread","break","breakfast","breath","brian","brick","bridge","british","broccoli","brochure","broker","bronze","brother","brother-in-law","brow","brown","brush","bubble","bucket","budget","buffer","buffet","bugle","building","bulb","bull","bulldozer","bumper","bun","burglar","burma","burn","burst","bus","bush","business","butane","butcher","butter","button","buzzard","cabbage","cabinet","cable","cactus","cafe","cake","calculator","calculus","calendar","calf","call","camel","camera","camp","can","cancer","candle","cannon","canoe","canvas","cap","capital","cappelletti","capricorn","captain","caption","car","caravan","carbon","card","cardboard","cardigan","care","carnation","carol","carp","carpenter","carriage","carrot","cart","cartoon","case","cast","castanet","cat","catamaran","caterpillar","cathedral","catsup","cattle","cauliflower","cause","caution","cave","c-clamp","cd","ceiling","celery","celeste","cell","cellar","cello","celsius","cement","cemetery","cent","centimeter","century","ceramic","cereal","certification","chain","chair","chalk","chance","change","channel","character","chard","charles","chauffeur","check","cheek","cheese","cheetah","chef","chemistry","cheque","cherry","chess","chest","chick","chicken","chicory","chief","child","children","chill","chime","chimpanzee","chin","china","chinese","chive","chocolate","chord","christmas","christopher","chronometer","church","cicada","cinema","circle","circulation","cirrus","citizenship","city","clam","clarinet","class","claus","clave","clef","clerk","click","client","climb","clipper","cloakroom","clock","close","closet","cloth","cl
oud","clover","club","clutch","coach","coal","coast","coat","cobweb","cockroach","cocktail"
|
|
|
|
|
|
|
|
$adjectives = ["abased","abject","able","abloom","ablush","abreast","abridged","abroach","abroad","abrupt","abscessed","absolved","absorbed","abstruse","absurd","abused","abuzz","accrete","accrued","accurst","acerb","aching","acock","acold","acorned","acred","acrid","acting","added","addle","addorsed","adept","adjunct","admired","adnate","adored","adrift","adroit","adscript","adult","adunc","adust","advised","aery","afeard","afeared","affine","affined","afire","aflame","afloat","afoot","afoul","afraid","after","aftmost","agape","agaze","aged","ageing","ageless","agelong","aggrieved","aghast","agile","aging","agleam","agley","aglow","agnate","ago","agog","agone","agreed","aground","ahead","ahorse","ahull","aidful","aidless","ailing","aimless","ain","air","airborne","airless","airsick","airtight","ajar","akin","alar","alate","alert","algal","algid","algoid","alien","alight","alike","alined","alive","alleged","allowed","alloyed","alone","aloof","alright","altered","altern","alvine","amazed","amber","amiss","amok","amort","ample","amuck","amused","android","angled","anguine","anguished","anile","announced","ansate","anti","antic","antique","antlered","antlike","antrorse","anxious","apart","apeak","apish","appalled","applied","appressed","arcane","arching","argent","arid","armchair","armless","armored","aroid","aroused","arranged","arrant","arrased","arrhythmic","artful","artless","arty","ashake","ashamed","ashen","ashy","askance","askant","askew","asking","aslant","asleep","aslope","asphalt","asprawl","asquint","assumed","assured","astir","astral","astute","aswarm","athirst","atilt","atrip","attached","attack","attent","attired","attrite","attuned","audile","aurous","austere","averse","avid","avowed","awake","aware","awash","away","aweless","awesome","awestruck","awful","awheel","awing","awkward","awnless","awry","axile","azure","babbling","baccate","backboned","backhand","backless","backmost","backstage","backstair","backstairs","backswept","backward","backwoods","badd
ish","baffling","baggy","bairnly","balanced","balding","baldish","baleful","balky","bally","balmy","banal","bandaged","banded","baneful","bangled","bankrupt","banner","bannered","baptist","bar","barbate","bardic","bardy","bareback","barebacked","barefaced","barefoot","barer","barest","baric","barish","barkless","barky","barmy","baroque","barrelled","baseless","baser","basest","bashful","basic","bassy","bastioned","bated","battered","battled","batty","bausond","bawdy","beaded","beady","beaky","beaming","beamish","beamless","beamy","beardless","bearish","bearlike","beastlike","beastly","beaten","beating","beauish","becalmed","bedded","bedfast","bedight","bedimmed","bedrid","beechen","beefy","beery","beetle","befogged","begrimed","beguiled","behind","bellied","belted","bemazed","bemused","bended","bending","bendwise","bendy","benign","benthic","benzal","bereft","berried","berserk","besieged","bespoke","besprent","bestead","bestial","betrothed","beveled","biased","bifid","biform","bigger","biggest","biggish","bijou","bilgy","bilious","billion","billionth","bilobed","binate","biped","birchen","birdlike","birken","bistred","bitchy","bitless","bitten","bitty","bivalve","bizarre","blackish","blameful","blameless","blaring","blasted","blasting","blatant","bleary","blended","blending","blindfold","blinding","blinking","blissful","blissless","blithesome","bloated","blockish","blocky","blooded","bloodied","bloodshot","bloodstained","blooming","bloomless","bloomy","blotchy","blotto","blotty","blowhard","blowsy","blowy","blowzy","blubber","bluer","bluest","bluish","blurry","blushful","blushless","boarish","boastful","boastless","bobtail","bodger","bodied","boding","boggy","bogus","bomb","bombproof","boneless","bonism","bonkers","bony","bonzer","bookish","bookless","boorish","booted","bootleg","bootless","boozy","bordered","boring","bosker","bosky","bosom","bosomed","bossy","botchy","bouffant","boughten","bouilli","bouncy","bounded","bounden","boundless","bousy","bovid","bovine","
bowing","boxlike","boyish","bracing","brackish","bractless","braggart","bragging","braided"
|
|
|
|
|
|
|
|
|
|
|
|
$settings = loadSettings();
|
|
|
|
$domains = explode(',', $settings['DOMAINS']);
|
|
|
|
$dom = $domains[array_rand($domains)];
|
2023-11-23 09:26:44 +00:00
|
|
|
|
2023-11-09 22:21:17 +00:00
|
|
|
$dom = str_replace('*', $nouns[array_rand($nouns)], $dom);
|
|
|
|
while (strpos($dom, '*') !== false) {
|
|
|
|
$dom = str_replace('*', $nouns[array_rand($nouns)], $dom);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
return $adjectives[array_rand($adjectives)] . '.' . $nouns[array_rand($nouns)].'@'.$dom;
|
2023-11-11 13:36:34 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
/**
 * Strip <script> tags and common script-executing attributes from HTML.
 *
 * NOTE(review): regex-based HTML sanitisation is inherently incomplete and
 * should not be the only defence for untrusted markup — a real sanitiser
 * library is preferable. This version closes the most obvious gaps of the
 * old one (single-quoted and unquoted attribute values were not matched,
 * and the final attribute regex was malformed).
 *
 * @param string $html untrusted HTML
 * @return string HTML with script vectors removed
 */
function removeScriptsFromHtml($html) {
    // Remove script tags including their content.
    $html = preg_replace('/<script\b[^>]*>(.*?)<\/script\s*>/is', "", $html);

    // Remove inline event handler attributes (onclick, onerror, ...) with
    // double-quoted, single-quoted or unquoted values.
    $html = preg_replace('/\son\w+\s*=\s*("[^"]*"|\'[^\']*\'|[^\s>]+)/i', "", $html);

    // Remove href/src attributes pointing at javascript: URLs.
    $html = preg_replace('/\s(href|src)\s*=\s*("\s*javascript:[^"]*"|\'\s*javascript:[^\']*\'|javascript:[^\s>]+)/i', "", $html);

    return $html;
}
|
|
|
|
|
|
|
|
/**
 * Count how many stored emails (*.json files) an address has.
 *
 * @param string $email mailbox address
 * @return int number of mail files; 0 when the directory cannot be opened
 */
function countEmailsOfAddress($email)
{
    $count = 0;
    if ($handle = opendir(getDirForEmail($email))) {
        while (false !== ($entry = readdir($handle)))
            if (endsWith($entry,'.json'))
                $count++;
        // closedir() must stay inside the if: it previously ran even when
        // opendir() failed, passing false to closedir().
        closedir($handle);
    }
    return $count;
}
|
|
|
|
|
|
|
|
/**
 * Recursively delete a directory tree.
 *
 * Symbolic links are unlinked rather than followed, so a link placed inside
 * the tree cannot cause deletion of files outside it.
 *
 * @param string $dir directory to remove
 * @return bool true when the top-level directory (or link) was removed
 */
function delTree($dir) {

    // Remove a symlinked "directory" as a link, never descend through it.
    if (is_link($dir)) {
        return unlink($dir);
    }

    $files = array_diff(scandir($dir), array('.','..'));
    foreach ($files as $file) {
        $path = "$dir/$file";
        if (is_dir($path) && !is_link($path)) {
            delTree($path);
        } else {
            unlink($path);
        }
    }
    return rmdir($dir);
}
|