Mirror of https://gitlab.com/pholy/OSCAR-code.git (synced 2025-04-06 03:00:43 +00:00)

Merge branch 'master' into translations

Commit: f29602a02e

Building/Linux/OSCAR.desktop (Executable file → Normal file, 0 lines changed)
@@ -4,8 +4,10 @@ you have already compiled and linked OSCAR using qmake and make, or QtCreator.
 The packages, once built, can be installed using 'dpkg -i' or 'gdebi' - with either su or sudo. If you use
 sudo, you will get a desktop icon installed, which must be double-clicked and trusted to see the actual icon.
 
-The packaged file cannot be installed using apt, apt-get, or aptitude, because those programs
-install from the distribution repository, but not a plan package file.
+The packaged file cannot be installed using apt-get or aptitude, because those programs
+install from the distribution repository, but not a plain package file.
 
+The package can be installed with apt or apt -F if the file name is preceded by ./ to force filename recognition.
+
 The packaging scripts assume the following folder structure:
 
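As a quick illustration of the install commands described above (a sketch only; the package file name is hypothetical and depends on the version and architecture actually built):

    # Any of these installs the locally built package; the leading ./ matters for apt.
    sudo dpkg -i ./OSCAR_1.2.0-1_amd64.deb
    sudo gdebi ./OSCAR_1.2.0-1_amd64.deb        # gdebi also resolves missing dependencies
    sudo apt install ./OSCAR_1.2.0-1_amd64.deb  # ./ forces apt to treat it as a file, not a repository package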
@@ -22,6 +24,9 @@ The code to put a desktop icon in the Desktop folder, regardless of language, wa
 
 Getting a menu item installed is largely due to the efforts of CrimsonNape.
 
+The mkDebian9.sh and mkUbuntu.sh scripts have been consolidated into mkDistDeb.sh and are now deprecated and will be removed. mkDistDeb.sh
+has code to query which packages are available for certain libraries.
+
 Finally, the mkRedHat.sh script has not been tested.
 
 
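For readers who want to see what that library query looks like in practice, here is a minimal sketch along the lines of the getPkg helper in mkDistDeb.sh (shown in full later in this diff); the variable names are illustrative only:

    # Find the installed Qt5 core package and its major.minor version with dpkg.
    qt_pkg=$(dpkg -l | awk '{print $2}' | grep qt5 | grep core | awk -F: '{print $1}')
    qt_ver=$(dpkg -l | grep "$qt_pkg" | awk '{print $3}' | awk -F. '{print $1 "." $2}')
    echo "Qt5 core package: $qt_pkg  version: $qt_ver"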
@@ -10,32 +10,32 @@ appli_name="OSCAR-test"
 # binary copy flag
 copy_flag=0
 
-# Select the binary file for the good version of QT (5.7 or 5.9)
-#echo "test the version of qt5 core version"
+#### Select the binary file for the good version of QT (5.7 or 5.9)
+####echo "test the version of qt5 core version"
+###
+###if [ -f "/etc/redhat-release" ]; then
+### # for mageia (red hat)
+### echo "distribution : mageia (red hat)"
+### Qt5_core=$(yum search -q qt5core5 | awk '{print $1}' | grep lib | sort -u)
+### MajorVer_no=$(yum info $Qt5_core | grep -i "version" | awk '{print $3}' | awk -F. '{print $1}')
+### MinorVer_no=$(yum info $Qt5_core | grep -i "version" | awk '{print $3}' | awk -F. '{print $2}')
+###
+###elif [ -f "/etc/lsb-release" ] || [ -f "/etc/debian_version" ]; then
+### # for debian
+#### echo "distribution : debian"
+### Qt5_core=$(dpkg -l | awk '{print $2}'| grep qt5 | grep core | awk -F: '{print $1}')
+### Major_ver=$(dpkg -l | grep $Qt5_core | awk '{print $3}' | awk -F. '{print $1}')
+### Minor_ver=$(dpkg -l | grep $Qt5_core | awk '{print $3}' | awk -F. '{print $2}')
+###else
+### echo "unknown distribution "
+### exit
+###fi
+###
+####echo "Qt5_core = '$Qt5_core'"
+####echo "Major_ver = $Major_ver"
+####echo "Minor_ver = $Minor_ver"
 
-if [ -f "/etc/redhat-release" ]; then
-# for mageia (red hat)
-echo "distribution : mageia (red hat)"
-Qt5_core=$(yum search -q qt5core5 | awk '{print $1}' | grep lib | sort -u)
-MajorVer_no=$(yum info $Qt5_core | grep -i "version" | awk '{print $3}' | awk -F. '{print $1}')
-MinorVer_no=$(yum info $Qt5_core | grep -i "version" | awk '{print $3}' | awk -F. '{print $2}')
-
-elif [ -f "/etc/lsb-release" ] || [ -f "/etc/debian_version" ]; then
-# for debian
-# echo "distribution : debian"
-Qt5_core=$(dpkg -l | awk '{print $2}'| grep qt5 | grep core | awk -F: '{print $1}')
-Major_ver=$(dpkg -l | grep $Qt5_core | awk '{print $3}' | awk -F. '{print $1}')
-Minor_ver=$(dpkg -l | grep $Qt5_core | awk '{print $3}' | awk -F. '{print $2}')
-else
-echo "unknown distribution "
-exit
-fi
-
-#echo "Qt5_core = '$Qt5_core'"
-#echo "Major_ver = $Major_ver"
-#echo "Minor_ver = $Minor_ver"
-
-if [ -x /usr/bin/update-menus ]; then update-menus; fi
+##if [ -x /usr/bin/update-menus ]; then update-menus; fi
 
 if [ ! -z "$SUDO_USER" ]; then
 # find real name of the Desktop folder (Bureau for xubuntu french version)
@@ -10,38 +10,38 @@ appli_name="OSCAR"
 # binary copy flag
 copy_flag=0
 
-# Select the binary file for the good version of QT (5.7 or 5.9)
-echo "test the version of qt5 core version"
+#### Select the binary file for the good version of QT (5.7 or 5.9)
+###echo "test the version of qt5 core version"
+###
+###if [ -f "/etc/redhat-release" ]; then
+### # for mageia (red hat)
+### echo "distribution : mageia (red hat)"
+### nom_core=$(yum search -q qt5core5 | awk '{print $1}' | grep lib | sort -u)
+### no_vermaj=$(yum info $nom_core | grep -i "version" | awk '{print $3}' | awk -F. '{print $1}')
+### no_vermin=$(yum info $nom_core | grep -i "version" | awk '{print $3}' | awk -F. '{print $2}')
+###
+###elif [ -f "/etc/lsb-release" ] || [ -f "/etc/debian_version" ]; then
+### # for debian
+### echo "distribution : debian"
+### nom_core=$(dpkg -l | awk '{print $2}'| grep qt5 | grep core | awk -F: '{print $1}')
+### no_vermaj=$(dpkg -l | grep $nom_core | awk '{print $3}' | awk -F. '{print $1}')
+### no_vermin=$(dpkg -l | grep $nom_core | awk '{print $3}' | awk -F. '{print $2}')
+###else
+### echo "unknown distribution "
+### exit
+###fi
+###
+###echo "QT5_core = '$nom_core'"
+###echo "Major ver = $no_vermaj"
+###echo "Minor ver = $no_vermin"
 
-if [ -f "/etc/redhat-release" ]; then
-# for mageia (red hat)
-echo "distribution : mageia (red hat)"
-nom_core=$(yum search -q qt5core5 | awk '{print $1}' | grep lib | sort -u)
-no_vermaj=$(yum info $nom_core | grep -i "version" | awk '{print $3}' | awk -F. '{print $1}')
-no_vermin=$(yum info $nom_core | grep -i "version" | awk '{print $3}' | awk -F. '{print $2}')
-
-elif [ -f "/etc/lsb-release" ] || [ -f "/etc/debian_version" ]; then
-# for debian
-echo "distribution : debian"
-nom_core=$(dpkg -l | awk '{print $2}'| grep qt5 | grep core | awk -F: '{print $1}')
-no_vermaj=$(dpkg -l | grep $nom_core | awk '{print $3}' | awk -F. '{print $1}')
-no_vermin=$(dpkg -l | grep $nom_core | awk '{print $3}' | awk -F. '{print $2}')
-else
-echo "unknown distribution "
-exit
-fi
-
-echo "QT5_core = '$nom_core'"
-echo "Major ver = $no_vermaj"
-echo "Minor ver = $no_vermin"
-
-if [ -x /usr/bin/update-menus ]; then update-menus; fi
+##if [ -x /usr/bin/update-menus ]; then update-menus; fi
 
 if [ X_$SUDO_USER != "X_" ]; then
 # find real name of the Desktop folder (Bureau for xubuntu french version)
 desktop_folder_name="/home/$SUDO_USER/Desktop"
 
-# si doesn't exist, try to find it translated name
+# if it doesn't exist, try to find its translated name
 tmp_dir=""
 if [ ! -d "$desktop_folder_name" ]; then
 tmp_dir=`cat /home/$SUDO_USER/.config/user-dirs.dirs | grep XDG_DESKTOP_DIR | awk -F= '{print $2}' | awk -F\" '{print $2}' | awk -F\/ '{print $2}'`
@@ -1,8 +0,0 @@
-#! /bin/bash
-#
-mv /opt/tempDir /opt/OSCAR
-ln -s /opt/OSCAR/OSCAR /usr/bin/OSCAR
-cp /opt/OSCAR/OSCAR.desktop /home/$SUDO_USER/Desktop
-chown $SUDO_USER:$SUDO_USER /home/$SUDO_USER/Desktop/OSCAR.desktop
-chmod a+x /home/$SUDO_USER/Desktop/OSCAR.desktop
-#
@@ -30,7 +30,7 @@ pre_rem="rm_usrbin.sh"
 post_rem="clean_rm.sh"
 # build folder (absolute path is better)
 build_folder="/home/$USER/OSCAR/build"
-if [[ -n ${PRERELEASE} && ${RC} ]] ; then
+if [[ -n ${PRERELEASE} && -z ${RC} ]] ; then
 appli_name=${appli_name}-test
 post_inst="ln_usrbin-test.sh"
 pre_rem="rm_usrbin-test.sh"
Building/Linux/mkDistDeb.sh (new executable file, 179 lines)
@@ -0,0 +1,179 @@
+#! /bin/bash
+# First parameter is optional
+#
+function getPkg () {
+    unset PKGNAME
+    unset PKGVERS
+    while read stat pkg ver other ;
+    do
+        if [[ ${stat} == "ii" ]] ; then
+            PKGNAME=`awk -F: '{print $1}' <<< ${pkg}`
+            PKGVERS=`awk -F. '{print $1 "." $2}' <<< ${ver}`
+            break
+        fi ;
+    done <<< $(dpkg -l | grep $1)
+}
+
+ITERATION=$1
+if [ -z ${ITERATION} ]; then
+    ITERATION="1"
+fi
+
+SRC=/home/$USER/OSCAR/OSCAR-code/oscar
+
+VERSION=`awk '/#define VERSION / { gsub(/"/, "", $3); print $3 }' ${SRC}/VERSION`
+if [[ ${VERSION} == *-* ]]; then
+    # Use ~ for prerelease information so that it sorts correctly compared to release
+    # versions. See https://www.debian.org/doc/debian-policy/ch-controlfields.html#version
+    IFS="-" read -r VERSION PRERELEASE <<< ${VERSION}
+    if [[ ${PRERELEASE} == *rc* ]]; then
+        RC=1
+    fi
+    VERSION="${VERSION}~${PRERELEASE}"
+fi
+GIT_REVISION=`awk '/#define GIT_REVISION / { gsub(/"/, "", $3); print $3 }' ${SRC}/git_info.h`
+echo Version: ${VERSION}
+
+# application name
+appli_name="OSCAR"
+pre_inst="tst_user.sh"
+# build folder (absolute path is better)
+build_folder="/home/$USER/OSCAR/build"
+if [[ -n ${PRERELEASE} && -z ${RC} ]] ; then
+    appli_name=${appli_name}-test
+    post_inst="ln_usrbin-test.sh"
+    pre_rem="rm_usrbin-test.sh"
+    post_rem="clean_rm-test.sh"
+else
+    post_inst="ln_usrbin.sh"
+    pre_rem="rm_usrbin.sh"
+    post_rem="clean_rm.sh"
+fi
+
+# temporary folder (absolute path is better)
+temp_folder="/home/$USER/tmp_deb_${appli_name}/"
+
+# destination folder in the .deb file
+dest_folder="/usr/"
+
+# the .deb file mustn't exist
+archi_tmp=$(lscpu | grep -i architecture | awk -F: {'print $2'} | tr -d " ")
+if [ "$archi_tmp" = "x86_64" ];then
+    archi="amd64"
+else
+    archi="unknown"
+fi
+deb_file="${appli_name}_${VERSION}-${ITERATION}_$archi.deb"
+
+# if the deb file exists, fatal error
+if [ -f "./$deb_file" ]; then
+    echo "destination file (./$deb_file) exists. fatal error"
+    exit
+fi
+
+# retrieve package versions for the dependencies
+getPkg libqt5core
+qtver=$PKGVERS
+
+getPkg libdouble
+dblPkg=$PKGNAME
+
+echo "QT version " $qtver
+echo "DblConv package " $dblPkg
+
+# clean folders needed to create the package
+if [ -d "${temp_folder}" ]; then
+    rm -r ${temp_folder}
+fi
+mkdir ${temp_folder}
+if [ ! -d "${temp_folder}" ]; then
+    echo "Folder (${temp_folder}) not created : fatal error."
+    exit
+fi
+chmod 0755 ${temp_folder}
+# save current value of umask (for u=g and not g=o)
+current_value=$(umask)
+umask 022
+mkdir ${temp_folder}/bin
+mkdir ${temp_folder}/share
+mkdir ${temp_folder}/share/${appli_name}
+mkdir ${temp_folder}/share/doc
+share_doc_folder="${temp_folder}/share/doc/${appli_name}"
+mkdir ${share_doc_folder}
+mkdir ${temp_folder}/share/icons
+mkdir ${temp_folder}/share/icons/hicolor
+mkdir ${temp_folder}/share/icons/hicolor/48x48
+mkdir ${temp_folder}/share/icons/hicolor/48x48/apps
+mkdir ${temp_folder}/share/icons/hicolor/scalable
+mkdir ${temp_folder}/share/icons/hicolor/scalable/apps
+mkdir ${temp_folder}/share/applications
+
+# must delete debug symbols in the OSCAR binary file
+# --- V1
+strip -s -o ${temp_folder}/bin/${appli_name} ${build_folder}/oscar/OSCAR
+#old code : cp ${build_folder}/oscar/OSCAR ${temp_folder}/bin
+
+# 2>/dev/null : errors do not appear : we don't care about them
+cp -r ${build_folder}/oscar/Help ${temp_folder}/share/${appli_name} 2>/dev/null
+cp -r ${build_folder}/oscar/Html ${temp_folder}/share/${appli_name} 2>/dev/null
+cp -r ${build_folder}/oscar/Translations ${temp_folder}/share/${appli_name} 2>/dev/null
+cp ./${appli_name}.png ${temp_folder}/share/icons/hicolor/48x48/apps/${appli_name}.png
+cp ./${appli_name}.svg ${temp_folder}/share/icons/hicolor/scalable/apps/${appli_name}.svg
+cp ./${appli_name}.desktop ${temp_folder}/share/applications/${appli_name}.desktop
+
+echo "Copyright 2019-2020 oscar-team.org <oscar@oscar-team.org>" > $share_doc_folder/copyright
+
+changelog_file="$share_doc_folder/changelog"
+
+# the automatically generated changelog has a bad name
+# need to generate one and tell fpm to use it instead of creating one
+# it seems that it needs both of them...
+
+# creation of the changelog.Debian.gz
+echo "$appli_name (${VERSION}-${ITERATION}) whatever; urgency=medium" > $changelog_file
+echo "" >> $changelog_file
+echo " * Package created with FPM." >> $changelog_file
+echo "" >> $changelog_file
+echo " -- oscar-team.org <oscar@oscar-team.org>" >> $changelog_file
+gzip --best $changelog_file
+description='Open Source CPAP Analysis Reporter\n<extended description needed to be filled with the right value>'
+# trick : need to use echo -e to expand the \n (carriage return) that splits the short description from the extended one
+description=$(echo -e $description)
+
+# restore umask value
+umask $current_value
+
+# create the .deb file (the lintian test shows just a warning about a man page that doesn't exist : don't care about that)
+fpm --input-type dir --output-type deb \
+    --prefix ${dest_folder} \
+    --before-install ${pre_inst} \
+    --after-install ${post_inst} \
+    --before-remove ${pre_rem} \
+    --after-remove ${post_rem} \
+    --name ${appli_name} --version ${VERSION} --iteration ${ITERATION} \
+    --category misc \
+    --deb-priority optional \
+    --maintainer " -- oscar-team.org <oscar@oscar-team.org>" \
+    --license GPL-v3 \
+    --vendor oscar-team.org \
+    --description "$description" \
+    --url https://sleepfiles.com/OSCAR \
+    --deb-no-default-config-files \
+    --depends $dblPkg \
+    --depends libpcre16-3 \
+    --depends qttranslations5-l10n \
+    --depends "libqt5core5a > $qtver" \
+    --depends libqt5serialport5 \
+    --depends libqt5xml5 \
+    --depends libqt5network5 \
+    --depends libqt5gui5 \
+    --depends libqt5widgets5 \
+    --depends libqt5opengl5 \
+    --depends libqt5printsupport5 \
+    --depends libglu1-mesa \
+    --depends libgl1 \
+    --depends libc6 \
+    --no-deb-generate-changes \
+    -C ${temp_folder} \
+    .
+
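A usage sketch for the new script (assuming it is run from the Building/Linux folder with fpm installed and the build tree laid out as described in the README; the optional argument is the package iteration, which defaults to 1, and the output file name shown is only an example):

    ./mkDistDeb.sh        # produces e.g. OSCAR_1.2.0~beta-1_amd64.deb
    ./mkDistDeb.sh 2      # same version, iteration 2 in the file name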
@@ -1,6 +0,0 @@
-#! /bin/bash
-#
-mv /opt/OSCAR /opt/tempDir
-rm /usr/bin/OSCAR
-rm /home/$SUDO_USER/Desktop/OSCAR.desktop
-#
@@ -21,10 +21,15 @@
     </ul>
     <li>[new] Add the "peak flow" channel reported by pre-DreamStation ventilators.</li>
     <li>[new] Automatically detect and resolve graphics-related crashes on Windows.</li>
+    <li>[new] Support AVAPS in the Overview pressure chart.</li>
+    <li>[fix] Fix missing bars in the Overview pressure chart for Philips Respironics devices.</li>
+    <li>[fix] Add missing Philips Respironics pressure channels to CSV export.</li>
+    <li>[fix] Fix zero Philips Respironics AHI in CSV session export.</li>
     <li>[fix] Add support for the Bi-Flex lock setting on pre-DreamStation ventilators.</li>
     <li>[fix] Fix the pressure waveform scale for the BiPAP autoSV Advanced 30 (960T)</li>
     <li>[fix] Add support for rise time mode on DreamStation BiPAP devices (600X-700X).</li>
     <li>[fix] Remove the ramp time and pressure settings when the ramp is disabled on pre-DreamStation devices.</li>
+    <li>[fix] Improve import of Philips Respironics oximetry data.</li>
     <li>[fix] Fix occasional failure to save imported Viatom data.</li>
     </ul>
     <p>
@@ -54,6 +54,8 @@ void usage() {
     qDebug() << "\t-g ### First signal";
     qDebug() << "\t-m ### Last signal";
    qDebug() << "\t-s Signal list only";
+    qDebug() << "\t-H Don't print the header";
+    qDebug() << "\t-S Don't print signals list";
     qDebug() << "\t-h or -? This help message";
 }
 
@@ -77,6 +79,7 @@ int main(int argc, char *argv[]) {
 
     QString filename = args[args.size()-1];
     bool showall = false, brief = false;
+    bool skipHeader = false, skipSignals = false;
 
     for (int i = 1; i < args.size()-1; i++) {
         if (args[i] == "-f")
@@ -91,6 +94,10 @@ int main(int argc, char *argv[]) {
             showall = true;
         else if (args[i] == "-s")
             brief = true;
+        else if (args[i] == "-H")
+            skipHeader = true;
+        else if (args[i] == "-S")
+            skipSignals = true;
         else if ((args[i] == "-?") || (args[i] == "-h")) {
             usage();
             exit(0);
@@ -116,17 +123,19 @@ int main(int argc, char *argv[]) {
 
 
     QDate date = str.edfHdr.startdate_orig.date();    // each STR.edf record starts at 12 noon
+    int numDays = str.GetNumDataRecords();
 
-    qDebug() << str.filename << " starts at " << date << " for " << str.GetNumDataRecords()
-             << " days, with " << str.GetNumSignals() << " signals";
+    qDebug() << str.filename << " starts at " << date << " for " << numDays
+             << " days, ending at " << date.addDays(numDays) << " with " << str.GetNumSignals() << " signals";
 
     if (args.size() == 2) {
         exit(0);
     }
 
-    dumpHeader( (str.edfHdr) );
-    dumpSignals( (str.edfsignals) );
+    if ( ! skipHeader)
+        dumpHeader( (str.edfHdr) );
+    if ( ! skipSignals )
+        dumpSignals( (str.edfsignals) );
 
     if ( brief )
         exit(0);
@@ -142,7 +151,7 @@ int main(int argc, char *argv[]) {
     if (lastSig == 0 )
         lastSig = str.GetNumSignals();
 
-    if (((first > 0)&&(last == 0)) || last > size)
+    if ((last == 0) || (last > size))
         last = size;
 
     date = date.addDays(first);
oscar/Graphs/gPressureChart.cpp (new file, 239 lines)
@@ -0,0 +1,239 @@
+/* gPressureChart Implementation
+ *
+ * Copyright (c) 2020 The Oscar Team
+ * Copyright (c) 2011-2018 Mark Watkins <mark@jedimark.net>
+ *
+ * This file is subject to the terms and conditions of the GNU General Public
+ * License. See the file COPYING in the main directory of the source code
+ * for more details. */
+
+#include "gPressureChart.h"
+
+gPressureChart::gPressureChart()
+    : gSummaryChart("Pressure", MT_CPAP)
+{
+    addCalc(CPAP_Pressure, ST_SETMAX);
+    addCalc(CPAP_Pressure, ST_MID);
+    addCalc(CPAP_Pressure, ST_90P);
+    addCalc(CPAP_PressureMin, ST_SETMIN);
+    addCalc(CPAP_PressureMax, ST_SETMAX);
+
+    addCalc(CPAP_EPAP, ST_SETMAX);
+    addCalc(CPAP_IPAP, ST_SETMAX);
+    addCalc(CPAP_EPAPLo, ST_SETMAX);
+    addCalc(CPAP_IPAPHi, ST_SETMAX);
+
+    addCalc(CPAP_EPAP, ST_MID);
+    addCalc(CPAP_EPAP, ST_90P);
+    addCalc(CPAP_IPAP, ST_MID);
+    addCalc(CPAP_IPAP, ST_90P);
+
+    // PRS1 reports pressure adjustments instead of observed pressures on some machines
+    addCalc(CPAP_PressureSet, ST_MID);
+    addCalc(CPAP_PressureSet, ST_90P);
+    addCalc(CPAP_EPAPSet, ST_MID);
+    addCalc(CPAP_EPAPSet, ST_90P);
+    addCalc(CPAP_IPAPSet, ST_MID);
+    addCalc(CPAP_IPAPSet, ST_90P);
+}
+
+
+int gPressureChart::addCalc(ChannelID code, SummaryType type)
+{
+    QColor color = schema::channel[code].defaultColor();
+    if (type == ST_90P) {
+        color = brighten(color, 1.33f);
+    }
+
+    int index = gSummaryChart::addCalc(code, type, color);
+
+    // Save the code and type used to add this calculation so that getCalc()
+    // can retrieve it by code and type instead of by hard-coded index.
+    m_calcs[code][type] = index;
+
+    return index;
+}
+
+
+SummaryCalcItem* gPressureChart::getCalc(ChannelID code, SummaryType type)
+{
+    return &calcitems[m_calcs[code][type]];
+}
+
+
+void gPressureChart::afterDraw(QPainter &, gGraph &graph, QRectF rect)
+{
+    QStringList presstr;
+
+    if (getCalc(CPAP_Pressure)->cnt > 0) {
+        presstr.append(channelRange(CPAP_Pressure, STR_TR_CPAP));
+    }
+
+    if (getCalc(CPAP_PressureMin, ST_SETMIN)->cnt > 0) {
+        // TODO: If using machines from different manufacturers in an overview,
+        // the below may not accurately find the APAP pressure channel for all
+        // days; but it only affects the summary label at the top.
+        ChannelID pressure = CPAP_Pressure;
+        if (getCalc(CPAP_PressureSet, ST_MID)->cnt > 0) {
+            pressure = CPAP_PressureSet;
+        }
+        presstr.append(QString("%1 %2/%3/%4/%5").
+            arg(STR_TR_APAP).
+            arg(getCalc(CPAP_PressureMin, ST_SETMIN)->min,0,'f',1).
+            arg(getCalc(pressure, ST_MID)->mid(), 0, 'f', 1).
+            arg(getCalc(pressure, ST_90P)->mid(),0,'f',1).
+            arg(getCalc(CPAP_PressureMax, ST_SETMAX)->max, 0, 'f', 1));
+
+    }
+
+    if (getCalc(CPAP_EPAP)->cnt > 0) {
+        // See CPAP_PressureSet note above.
+        ChannelID epap = CPAP_EPAP;
+        if (getCalc(CPAP_EPAPSet, ST_MID)->cnt > 0) {
+            epap = CPAP_EPAPSet;
+        }
+        presstr.append(channelRange(epap, STR_TR_EPAP));
+    }
+
+    if (getCalc(CPAP_IPAP)->cnt > 0) {
+        // See CPAP_PressureSet note above.
+        ChannelID ipap = CPAP_IPAP;
+        if (getCalc(CPAP_IPAPSet, ST_MID)->cnt > 0) {
+            ipap = CPAP_IPAPSet;
+        }
+        presstr.append(channelRange(ipap, STR_TR_IPAP));
+    }
+
+    if (getCalc(CPAP_EPAPLo)->cnt > 0) {
+        presstr.append(channelRange(CPAP_EPAPLo, STR_TR_EPAPLo));
+    }
+
+    if (getCalc(CPAP_IPAPHi)->cnt > 0) {
+        presstr.append(channelRange(CPAP_IPAPHi, STR_TR_IPAPHi));
+    }
+
+    QString txt = presstr.join(" ");
+    graph.renderText(txt, rect.left(), rect.top()-5*graph.printScaleY(), 0);
+
+}
+
+
+QString gPressureChart::channelRange(ChannelID code, const QString & label)
+{
+    SummaryCalcItem* calc = getCalc(code);
+    return QString("%1 %2/%3/%4").
+        arg(label).
+        arg(calc->min, 0, 'f', 1).
+        arg(calc->mid(), 0, 'f', 1).
+        arg(calc->max, 0, 'f', 1);
+}
+
+
+void gPressureChart::addSlice(ChannelID code, SummaryType type)
+{
+    float value = 0;
+    QString label;
+
+    switch (type) {
+        case ST_SETMIN:
+            value = m_day->settings_min(code);
+            label = schema::channel[code].label();
+            break;
+        case ST_SETMAX:
+            value = m_day->settings_max(code);
+            label = schema::channel[code].label();
+            break;
+        case ST_MID:
+            value = m_day->calcMiddle(code);
+            label = m_day->calcMiddleLabel(code);
+            break;
+        case ST_90P:
+            value = m_day->calcPercentile(code);
+            label = m_day->calcPercentileLabel(code);
+            break;
+        default:
+            qWarning() << "Unsupported summary type in gPressureChart";
+            break;
+    }
+
+    SummaryCalcItem* calc = getCalc(code, type);
+    float height = value - m_height;
+
+    m_slices->append(SummaryChartSlice(calc, value, height, label, calc->color));
+    m_height += height;
+}
+
+
+void gPressureChart::populate(Day * day, int idx)
+{
+    CPAPMode mode = (CPAPMode)(int)qRound(day->settings_wavg(CPAP_Mode));
+    m_day = day;
+    m_slices = &cache[idx];
+    m_height = 0;
+
+    if (mode == MODE_CPAP) {
+        addSlice(CPAP_Pressure);
+
+    } else if (mode == MODE_APAP) {
+        addSlice(CPAP_PressureMin, ST_SETMIN);
+        if (!day->summaryOnly()) {
+            // Handle PRS1 pressure adjustments reported separately from average (EPAP) pressure
+            ChannelID pressure = CPAP_Pressure;
+            if (m_day->channelHasData(CPAP_PressureSet)) {
+                pressure = CPAP_PressureSet;
+            }
+            addSlice(pressure, ST_MID);
+            addSlice(pressure, ST_90P);
+        }
+        addSlice(CPAP_PressureMax, ST_SETMAX);
+
+    } else if (mode == MODE_BILEVEL_FIXED) {
+        addSlice(CPAP_EPAP);
+        addSlice(CPAP_IPAP);
+
+    } else if (mode == MODE_BILEVEL_AUTO_FIXED_PS) {
+        addSlice(CPAP_EPAPLo);
+        if (!day->summaryOnly()) {
+            addSlice(CPAP_EPAP, ST_MID);
+            addSlice(CPAP_EPAP, ST_90P);
+            addSlice(CPAP_IPAP, ST_MID);
+            addSlice(CPAP_IPAP, ST_90P);
+        }
+        addSlice(CPAP_IPAPHi);
+
+    } else if ((mode == MODE_BILEVEL_AUTO_VARIABLE_PS) || (mode == MODE_ASV_VARIABLE_EPAP)) {
+        addSlice(CPAP_EPAPLo);
+        if (!day->summaryOnly()) {
+            // Handle PRS1 pressure adjustments when reported instead of observed pressures
+            ChannelID epap = CPAP_EPAP;
+            if (m_day->channelHasData(CPAP_EPAPSet)) {
+                epap = CPAP_EPAPSet;
+            }
+            ChannelID ipap = CPAP_IPAP;
+            if (m_day->channelHasData(CPAP_IPAPSet)) {
+                ipap = CPAP_IPAPSet;
+            }
+            addSlice(epap, ST_MID);
+            addSlice(epap, ST_90P);
+            addSlice(ipap, ST_MID);
+            addSlice(ipap, ST_90P);
+        }
+        addSlice(CPAP_IPAPHi);
+
+    } else if (mode == MODE_ASV) {
+        addSlice(CPAP_EPAP);
+        if (!day->summaryOnly()) {
+            addSlice(CPAP_IPAP, ST_MID);
+            addSlice(CPAP_IPAP, ST_90P);
+        }
+        addSlice(CPAP_IPAPHi);
+
+    } else if (mode == MODE_AVAPS) {
+        addSlice(CPAP_EPAP);
+        if (!day->summaryOnly()) {
+            addSlice(CPAP_IPAP, ST_MID);
+            addSlice(CPAP_IPAP, ST_90P);
+        }
+        addSlice(CPAP_IPAPHi);
+    }
+}
oscar/Graphs/gPressureChart.h (new file, 61 lines)
@@ -0,0 +1,61 @@
+/* gPressureChart Header
+ *
+ * Copyright (c) 2020 The Oscar Team
+ * Copyright (C) 2011-2018 Mark Watkins <mark@jedimark.net>
+ *
+ * This file is subject to the terms and conditions of the GNU General Public
+ * License. See the file COPYING in the main directory of the source code
+ * for more details. */
+
+#ifndef GPRESSURECHART_H
+#define GPRESSURECHART_H
+
+#include "gSessionTimesChart.h"
+
+class gPressureChart : public gSummaryChart
+{
+public:
+    gPressureChart();
+    virtual ~gPressureChart() {}
+
+    virtual Layer * Clone() {
+        gPressureChart * sc = new gPressureChart();
+        gSummaryChart::CloneInto(sc);
+        return sc;
+    }
+
+    // virtual void preCalc();
+    virtual void customCalc(Day *day, QVector<SummaryChartSlice> &slices) {
+        int size = slices.size();
+        float hour = day->hours(m_machtype);
+        for (int i=0; i < size; ++i) {
+            SummaryChartSlice & slice = slices[i];
+            SummaryCalcItem * calc = slices[i].calc;
+
+            calc->update(slice.value, hour);
+        }
+    }
+    virtual void afterDraw(QPainter &, gGraph &, QRectF);
+
+    virtual void populate(Day * day, int idx);
+
+    virtual QString tooltipData(Day * day, int idx) {
+        return day->getCPAPModeStr() + "\n" + day->getPressureSettings() + gSummaryChart::tooltipData(day, idx);
+    }
+
+    virtual int addCalc(ChannelID code, SummaryType type);
+
+protected:
+    SummaryCalcItem* getCalc(ChannelID code, SummaryType type = ST_SETMAX);
+    QString channelRange(ChannelID code, const QString & label);
+    void addSlice(ChannelID code, SummaryType type = ST_SETMAX);
+
+    QHash<ChannelID,QHash<SummaryType,int>> m_calcs;
+
+    // State passed between populate() and addSlice():
+    Day* m_day;
+    QVector<SummaryChartSlice>* m_slices;
+    float m_height;
+};
+
+#endif // GPRESSURECHART_H
@@ -1,5 +1,6 @@
 /* gSessionTimesChart Implementation
  *
+ * Copyright (c) 2020 The Oscar Team
  * Copyright (c) 2011-2018 Mark Watkins <mark@jedimark.net>
  *
  * This file is subject to the terms and conditions of the GNU General Public
@@ -101,6 +102,17 @@ void gSummaryChart::SetDay(Day *unused_day)
 //QMap<QDate, int> gSummaryChart::dayindex;
 //QList<Day *> gSummaryChart::daylist;
 
+int gSummaryChart::addCalc(ChannelID code, SummaryType type, QColor color)
+{
+    calcitems.append(SummaryCalcItem(code, type, color));
+    return calcitems.size() - 1;  // return the index of the newly appended calc
+}
+
+int gSummaryChart::addCalc(ChannelID code, SummaryType type)
+{
+    return addCalc(code, type, schema::channel[code].defaultColor());
+}
+
 
 bool gSummaryChart::keyPressEvent(QKeyEvent *event, gGraph *graph)
 {
@@ -1288,199 +1300,3 @@ QString gAHIChart::tooltipData(Day *day, int idx)
     }
     return QString("\n%1: %2").arg(STR_TR_AHI).arg(float(total) / hour,0,'f',2)+txt;
 }
-
-
-gPressureChart::gPressureChart()
-    :gSummaryChart("Pressure", MT_CPAP)
-{
-
-    // Do not reorder these!!! :P
-    addCalc(CPAP_Pressure, ST_SETMAX, schema::channel[CPAP_Pressure].defaultColor());                  // 00
-    addCalc(CPAP_Pressure, ST_MID, schema::channel[CPAP_Pressure].defaultColor());                     // 01
-    addCalc(CPAP_Pressure, ST_90P, brighten(schema::channel[CPAP_Pressure].defaultColor(), 1.33f));    // 02
-    addCalc(CPAP_PressureMin, ST_SETMIN, schema::channel[CPAP_PressureMin].defaultColor());            // 03
-    addCalc(CPAP_PressureMax, ST_SETMAX, schema::channel[CPAP_PressureMax].defaultColor());            // 04
-
-    addCalc(CPAP_EPAP, ST_SETMAX, schema::channel[CPAP_EPAP].defaultColor());      // 05
-    addCalc(CPAP_IPAP, ST_SETMAX, schema::channel[CPAP_IPAP].defaultColor());      // 06
-    addCalc(CPAP_EPAPLo, ST_SETMAX, schema::channel[CPAP_EPAPLo].defaultColor());  // 07
-    addCalc(CPAP_IPAPHi, ST_SETMAX, schema::channel[CPAP_IPAPHi].defaultColor());  // 08
-
-    addCalc(CPAP_EPAP, ST_MID, schema::channel[CPAP_EPAP].defaultColor());                  // 09
-    addCalc(CPAP_EPAP, ST_90P, brighten(schema::channel[CPAP_EPAP].defaultColor(),1.33f));  // 10
-    addCalc(CPAP_IPAP, ST_MID, schema::channel[CPAP_IPAP].defaultColor());                  // 11
-    addCalc(CPAP_IPAP, ST_90P, brighten(schema::channel[CPAP_IPAP].defaultColor(),1.33f));  // 12
-}
-
-void gPressureChart::afterDraw(QPainter &, gGraph &graph, QRectF rect)
-{
-    int pressure_cnt = calcitems[0].cnt;
-    int pressuremin_cnt = calcitems[3].cnt;
-    int epap_cnt = calcitems[5].cnt;
-    int ipap_cnt = calcitems[6].cnt;
-    int ipaphi_cnt = calcitems[8].cnt;
-    int epaplo_cnt = calcitems[7].cnt;
-
-    QStringList presstr;
-
-    float mid = 0;
-
-    if (pressure_cnt > 0) {
-        mid = calcitems[0].mid();
-        presstr.append(QString("%1 %2/%3/%4").
-            arg(STR_TR_CPAP).
-            arg(calcitems[0].min,0,'f',1).
-            arg(mid, 0, 'f', 1).
-            arg(calcitems[0].max,0,'f',1));
-    }
-    if (pressuremin_cnt > 0) {
-        presstr.append(QString("%1 %2/%3/%4/%5").
-            arg(STR_TR_APAP).
-            arg(calcitems[3].min,0,'f',1).
-            arg(calcitems[1].mid(), 0, 'f', 1).
-            arg(calcitems[2].mid(),0,'f',1).
-            arg(calcitems[4].max, 0, 'f', 1));
-
-    }
-    if (epap_cnt > 0) {
-        presstr.append(QString("%1 %2/%3/%4").
-            arg(STR_TR_EPAP).
-            arg(calcitems[5].min,0,'f',1).
-            arg(calcitems[5].mid(), 0, 'f', 1).
-            arg(calcitems[5].max, 0, 'f', 1));
-    }
-    if (ipap_cnt > 0) {
-        presstr.append(QString("%1 %2/%3/%4").
-            arg(STR_TR_IPAP).
-            arg(calcitems[6].min,0,'f',1).
-            arg(calcitems[6].mid(), 0, 'f', 1).
-            arg(calcitems[6].max, 0, 'f', 1));
-    }
-    if (epaplo_cnt > 0) {
-        presstr.append(QString("%1 %2/%3/%4").
-            arg(STR_TR_EPAPLo).
-            arg(calcitems[7].min,0,'f',1).
-            arg(calcitems[7].mid(), 0, 'f', 1).
-            arg(calcitems[7].max, 0, 'f', 1));
-    }
-
-    if (ipaphi_cnt > 0) {
-        presstr.append(QString("%1 %2/%3/%4").
-            arg(STR_TR_IPAPHi).
-            arg(calcitems[8].min,0,'f',1).
-            arg(calcitems[8].mid(), 0, 'f', 1).
-            arg(calcitems[8].max, 0, 'f', 1));
-    }
-    QString txt = presstr.join(" ");
-    graph.renderText(txt, rect.left(), rect.top()-5*graph.printScaleY(), 0);
-
-}
-
-
-void gPressureChart::populate(Day * day, int idx)
-{
-    float tmp;
-    CPAPMode mode = (CPAPMode)(int)qRound(day->settings_wavg(CPAP_Mode));
-    QVector<SummaryChartSlice> & slices = cache[idx];
-
-    if (mode == MODE_CPAP) {
-        float pr = day->settings_max(CPAP_Pressure);
-        slices.append(SummaryChartSlice(&calcitems[0], pr, pr, schema::channel[CPAP_Pressure].label(), calcitems[0].color));
-    } else if (mode == MODE_APAP) {
-        float min = day->settings_min(CPAP_PressureMin);
-        float max = day->settings_max(CPAP_PressureMax);
-
-        tmp = min;
-
-        slices.append(SummaryChartSlice(&calcitems[3], min, min, schema::channel[CPAP_PressureMin].label(), calcitems[3].color));
-        if (!day->summaryOnly()) {
-            float med = day->calcMiddle(CPAP_Pressure);
-            slices.append(SummaryChartSlice(&calcitems[1], med, med - tmp, day->calcMiddleLabel(CPAP_Pressure), calcitems[1].color));
-            tmp += med - tmp;
-
-            float p90 = day->calcPercentile(CPAP_Pressure);
-            slices.append(SummaryChartSlice(&calcitems[2], p90, p90 - tmp, day->calcPercentileLabel(CPAP_Pressure), calcitems[2].color));
-            tmp += p90 - tmp;
-        }
-        slices.append(SummaryChartSlice(&calcitems[4], max, max - tmp, schema::channel[CPAP_PressureMax].label(), calcitems[4].color));
-
-    } else if (mode == MODE_BILEVEL_FIXED) {
-        float epap = day->settings_max(CPAP_EPAP);
-        float ipap = day->settings_max(CPAP_IPAP);
-
-        slices.append(SummaryChartSlice(&calcitems[5], epap, epap, schema::channel[CPAP_EPAP].label(), calcitems[5].color));
-        slices.append(SummaryChartSlice(&calcitems[6], ipap, ipap - epap, schema::channel[CPAP_IPAP].label(), calcitems[6].color));
-
-    } else if (mode == MODE_BILEVEL_AUTO_FIXED_PS) {
-        float epap = day->settings_max(CPAP_EPAPLo);
-        tmp = epap;
-        float ipap = day->settings_max(CPAP_IPAPHi);
-
-        slices.append(SummaryChartSlice(&calcitems[7], epap, epap, schema::channel[CPAP_EPAPLo].label(), calcitems[7].color));
-        if (!day->summaryOnly()) {
-
-            float e50 = day->calcMiddle(CPAP_EPAP);
-            slices.append(SummaryChartSlice(&calcitems[9], e50, e50 - tmp, day->calcMiddleLabel(CPAP_EPAP), calcitems[9].color));
-            tmp += e50 - tmp;
-
-            float e90 = day->calcPercentile(CPAP_EPAP);
-            slices.append(SummaryChartSlice(&calcitems[10], e90, e90 - tmp, day->calcPercentileLabel(CPAP_EPAP), calcitems[10].color));
-            tmp += e90 - tmp;
-
-            float i50 = day->calcMiddle(CPAP_IPAP);
-            slices.append(SummaryChartSlice(&calcitems[11], i50, i50 - tmp, day->calcMiddleLabel(CPAP_IPAP), calcitems[11].color));
-            tmp += i50 - tmp;
-
-            float i90 = day->calcPercentile(CPAP_IPAP);
-            slices.append(SummaryChartSlice(&calcitems[12], i90, i90 - tmp, day->calcPercentileLabel(CPAP_IPAP), calcitems[12].color));
-            tmp += i90 - tmp;
-        }
-        slices.append(SummaryChartSlice(&calcitems[8], ipap, ipap - tmp, schema::channel[CPAP_IPAPHi].label(), calcitems[8].color));
-    } else if ((mode == MODE_BILEVEL_AUTO_VARIABLE_PS) || (mode == MODE_ASV_VARIABLE_EPAP)) {
-        float epap = day->settings_max(CPAP_EPAPLo);
-        tmp = epap;
-
-        slices.append(SummaryChartSlice(&calcitems[7], epap, epap, schema::channel[CPAP_EPAPLo].label(), calcitems[7].color));
-        if (!day->summaryOnly()) {
-            float e50 = day->calcMiddle(CPAP_EPAP);
-            slices.append(SummaryChartSlice(&calcitems[9], e50, e50 - tmp, day->calcMiddleLabel(CPAP_EPAP), calcitems[9].color));
-            tmp += e50 - tmp;
-
-            float e90 = day->calcPercentile(CPAP_EPAP);
-            slices.append(SummaryChartSlice(&calcitems[10], e90, e90 - tmp, day->calcPercentileLabel(CPAP_EPAP), calcitems[10].color));
-            tmp += e90 - tmp;
-
-            float i50 = day->calcMiddle(CPAP_IPAP);
-            slices.append(SummaryChartSlice(&calcitems[11], i50, i50 - tmp, day->calcMiddleLabel(CPAP_IPAP), calcitems[11].color));
-            tmp += i50 - tmp;
-
-            float i90 = day->calcPercentile(CPAP_IPAP);
-            slices.append(SummaryChartSlice(&calcitems[12], i90, i90 - tmp, day->calcPercentileLabel(CPAP_IPAP), calcitems[12].color));
-            tmp += i90 - tmp;
-        }
-        float ipap = day->settings_max(CPAP_IPAPHi);
-        slices.append(SummaryChartSlice(&calcitems[8], ipap, ipap - tmp, schema::channel[CPAP_IPAPHi].label(), calcitems[8].color));
-    } else if (mode == MODE_ASV) {
-        float epap = day->settings_max(CPAP_EPAP);
-        tmp = epap;
-
-        slices.append(SummaryChartSlice(&calcitems[5], epap, epap, schema::channel[CPAP_EPAP].label(), calcitems[5].color));
-        if (!day->summaryOnly()) {
-            float i50 = day->calcMiddle(CPAP_IPAP);
-            slices.append(SummaryChartSlice(&calcitems[11], i50, i50 - tmp, day->calcMiddleLabel(CPAP_IPAP), calcitems[11].color));
-            tmp += i50 - tmp;
-
-            float i90 = day->calcPercentile(CPAP_IPAP);
-            slices.append(SummaryChartSlice(&calcitems[12], i90, i90 - tmp, day->calcPercentileLabel(CPAP_IPAP), calcitems[12].color));
-            tmp += i90 - tmp;
-        }
-        float ipap = day->settings_max(CPAP_IPAPHi);
-        slices.append(SummaryChartSlice(&calcitems[8], ipap, ipap - tmp, schema::channel[CPAP_IPAPHi].label(), calcitems[8].color));
-    }
-
-}
-
-//void gPressureChart::afterDraw(QPainter &painter, gGraph &graph, QRect rect)
-//{
-//}
-
@@ -1,5 +1,6 @@
 /* gSessionTimesChart Header
  *
+ * Copyright (c) 2020 The Oscar Team
  * Copyright (C) 2011-2018 Mark Watkins <mark@jedimark.net>
  *
  * This file is subject to the terms and conditions of the GNU General Public
@@ -188,13 +189,8 @@ public:
         cache.clear();
     }
 
-    void addCalc(ChannelID code, SummaryType type, QColor color) {
-        calcitems.append(SummaryCalcItem(code, type, color));
-    }
-    void addCalc(ChannelID code, SummaryType type) {
-        calcitems.append(SummaryCalcItem(code, type, schema::channel[code].defaultColor()));
-    }
+    virtual int addCalc(ChannelID code, SummaryType type, QColor color);
+    virtual int addCalc(ChannelID code, SummaryType type);
 
     virtual Layer * Clone() {
         gSummaryChart * sc = new gSummaryChart(m_label, m_machtype);
@@ -429,37 +425,4 @@ public:
 };
 
 
-class gPressureChart : public gSummaryChart
-{
-public:
-    gPressureChart();
-    virtual ~gPressureChart() {}
-
-    virtual Layer * Clone() {
-        gPressureChart * sc = new gPressureChart();
-        gSummaryChart::CloneInto(sc);
-        return sc;
-    }
-
-    // virtual void preCalc();
-    virtual void customCalc(Day *day, QVector<SummaryChartSlice> &slices) {
-        int size = slices.size();
-        float hour = day->hours(m_machtype);
-        for (int i=0; i < size; ++i) {
-            SummaryChartSlice & slice = slices[i];
-            SummaryCalcItem * calc = slices[i].calc;
-
-            calc->update(slice.value, hour);
-        }
-    }
-    virtual void afterDraw(QPainter &, gGraph &, QRectF);
-
-    virtual void populate(Day * day, int idx);
-
-    virtual QString tooltipData(Day * day, int idx) {
-        return day->getCPAPModeStr() + "\n" + day->getPressureSettings() + gSummaryChart::tooltipData(day, idx);
-    }
-
-};
-
 #endif // GSESSIONTIMESCHART_H
@@ -134,7 +134,7 @@ bool EDFInfo::parseHeader( EDFHeaderRaw *hdrPtr )
     }
     edfHdr.reserved44=QString::fromLatin1(hdrPtr->reserved, 44).trimmed();
     edfHdr.num_data_records = QString::fromLatin1(hdrPtr->num_data_records, 8).toLong(&ok);
-    if (!ok) {
+    if ( (! ok) || (edfHdr.num_data_records < 1) ) {
         qWarning() << "EDFInfo::Parse() Bad data record count " << filename;
         // sleep(1);
         fileData.clear();
@@ -148,7 +148,7 @@ bool EDFInfo::parseHeader( EDFHeaderRaw *hdrPtr )
         return false;
     }
     edfHdr.num_signals = QString::fromLatin1(hdrPtr->num_signals, 4).toLong(&ok);
-    if (!ok) {
+    if ( (! ok) || (edfHdr.num_signals < 1) || (edfHdr.num_signals > 256) ) {
         qWarning() << "EDFInfo::Parse() Bad number of signals " << filename;
         // sleep(1);
         fileData.clear();
@@ -982,12 +982,9 @@ void PRS1Loader::ScanFiles(const QStringList & paths, int sessionid_base, Machin
                 // All samples exhibiting this behavior are DreamStations.
                 task->m_wavefiles.append(fi.canonicalFilePath());
             } else if (ext == 6) {
-                if (!task->oxifile.isEmpty()) {
-                    qDebug() << sid << "already has oximetry file" << relativePath(task->oxifile)
-                             << "skipping" << relativePath(fi.canonicalFilePath());
-                    continue;
-                }
-                task->oxifile = fi.canonicalFilePath();
+                // Oximetry data can also be split into multiple files, see waveform
+                // comment above.
+                task->m_oxifiles.append(fi.canonicalFilePath());
             }
 
             continue;
@@ -6854,12 +6851,16 @@
                 CHECK_VALUES(data[pos], 1, 2);  // 1 when EZ-Start is enabled? 2 when Auto-Trial? 3 when Auto-Trial is off or Opti-Start isn't off?
             }
             if (len == 2) {  // 400G, 500G has extra byte
-                // 0x80 seen with EZ-Start and CPAP-Check+ on 500X150
-                if (data[pos+1] != 0x80) {
-                    // 0x10 seen with EZ-Start enabled, Opti-Start off on 500X110
-                    // 0x20 seen with Opti-Start enabled
-                    // 0x30 seen with both Opti-Start and EZ-Start enabled on 500X110
-                    CHECK_VALUE(data[pos+1] & ~(0x10 | 0x20), 0);
+                switch (data[pos+1]) {
+                    case 0x00:  // 0x00 seen with EZ-Start disabled, no auto-trial, with CPAP-Check on 400X110
+                    case 0x10:  // 0x10 seen with EZ-Start enabled, Opti-Start off on 500X110
+                    case 0x20:  // 0x20 seen with Opti-Start enabled
+                    case 0x30:  // 0x30 seen with both Opti-Start and EZ-Start enabled on 500X110
+                    case 0x40:  // 0x40 seen with Auto-Trial
+                    case 0x80:  // 0x80 seen with EZ-Start and CPAP-Check+ on 500X150
+                        break;
+                    default:
+                        UNEXPECTED_VALUE(data[pos+1], "[0,0x10,0x20,0x30,0x40,0x80]")
                 }
             }
             break;
@@ -6919,7 +6920,9 @@
             // but curiously report the use of C-Flex+, even though Auto-CPAP uses A-Flex.
             CHECK_VALUE(len, 3);
             CHECK_VALUES(cpapmode, PRS1_MODE_CPAP, PRS1_MODE_CPAPCHECK);
-            CHECK_VALUES(data[pos], 30, 5);  // Auto-Trial Duration
+            if (data[pos] != 30) {
+                CHECK_VALUES(data[pos], 5, 25);  // Auto-Trial Duration
+            }
             this->AddEvent(new PRS1ParsedSettingEvent(PRS1_SETTING_AUTO_TRIAL, data[pos]));
             // If we want C-Flex+ to be reported as A-Flex, we can set cpapmode = PRS1_MODE_AUTOTRIAL here.
             // (Note that the setting event has already been added above, which is why ImportSummary needs
@@ -7131,7 +7134,7 @@ bool PRS1DataChunk::ParseSummaryF0V6(void)
         qWarning() << this->sessionid << "summary data too short:" << chunk_size;
         return false;
     }
-    if (chunk_size < 60) UNEXPECTED_VALUE(chunk_size, ">= 60");
+    if (chunk_size < 59) UNEXPECTED_VALUE(chunk_size, ">= 59");
 
     bool ok = true;
     int pos = 0;
@@ -8152,7 +8155,7 @@
 }
 
 
-void PRS1Import::ParseOximetry()
+void PRS1Import::ImportOximetry()
 {
     int size = oximetry.size();
 
||||||
@ -8208,10 +8211,10 @@ void PRS1Import::ImportOximetryChannel(ChannelID channel, QByteArray & data, qui
|
|||||||
quint64 start_ti;
|
quint64 start_ti;
|
||||||
int start_i;
|
int start_i;
|
||||||
|
|
||||||
// Split eventlist on invalid values (255)
|
// Split eventlist on invalid values (254-255)
|
||||||
for (int i=0; i < data.size(); i++) {
|
for (int i=0; i < data.size(); i++) {
|
||||||
unsigned char value = raw[i];
|
unsigned char value = raw[i];
|
||||||
bool valid = (value != 255);
|
bool valid = (value < 254);
|
||||||
|
|
||||||
if (valid) {
|
if (valid) {
|
||||||
if (pending_samples == false) {
|
if (pending_samples == false) {
|
||||||
@@ -8221,7 +8224,7 @@
             }
 
             if (channel == OXI_Pulse) {
-                if (value > 200) UNEXPECTED_VALUE(value, "<= 200 bpm");
+                if (value > 240) UNEXPECTED_VALUE(value, "<= 240 bpm");
             } else {
                 if (value > 100) UNEXPECTED_VALUE(value, "<= 100%");
             }
@@ -8245,7 +8248,7 @@
 }
 
 
-void PRS1Import::ParseWaveforms()
+void PRS1Import::ImportWaveforms()
 {
     int size = waveforms.size();
     quint64 s1, s2;
@@ -8413,7 +8416,7 @@ bool PRS1Import::ParseSession(void)
 
         // If there are no mask-on slices, then there's not any meaningful event or waveform data for the session.
         // If there's no event or waveform data, mark this session as a summary.
-        if (session->m_slices.count() == 0 || (m_event_chunks.count() == 0 && m_wavefiles.isEmpty() && oxifile.isEmpty())) {
+        if (session->m_slices.count() == 0 || (m_event_chunks.count() == 0 && m_wavefiles.isEmpty() && m_oxifiles.isEmpty())) {
             session->setSummaryOnly(true);
             save = true;
             break;  // and skip the occasional fragmentary event or waveform data
@@ -8430,14 +8433,18 @@ bool PRS1Import::ParseSession(void)
 
         if (!m_wavefiles.isEmpty()) {
             // Parse .005 Waveform files
+            waveforms = ReadWaveformData(m_wavefiles, "Waveform");
+
+            // Extract and import raw data into channels.
             ImportWaveforms();
         }
 
-        if (!oxifile.isEmpty()) {
-            // Parse .006 Waveform file
-            oximetry = loader->ParseFile(oxifile);
-            oximetry = CoalesceWaveformChunks(oximetry);
-            ParseOximetry();
+        if (!m_oxifiles.isEmpty()) {
+            // Parse .006 Waveform files
+            oximetry = ReadWaveformData(m_oxifiles, "Oximetry");
+
+            // Extract and import raw data into channels.
+            ImportOximetry();
         }
 
         save = true;
@@ -8447,16 +8454,17 @@ bool PRS1Import::ParseSession(void)
 }
 
 
-void PRS1Import::ImportWaveforms()
+QList<PRS1DataChunk *> PRS1Import::ReadWaveformData(QList<QString> & files, const char* label)
 {
     QMap<qint64,PRS1DataChunk *> waveform_chunks;
+    QList<PRS1DataChunk *> result;
 
-    if (m_wavefiles.count() > 1) {
-        qDebug() << session->session() << "Waveform data split across multiple files";
+    if (files.count() > 1) {
+        qDebug() << session->session() << label << "data split across multiple files";
     }
 
-    for (auto & f : m_wavefiles) {
-        // Parse a single .005 Waveform file
+    for (auto & f : files) {
+        // Parse a single .005 or .006 waveform file
         QList<PRS1DataChunk *> file_chunks = loader->ParseFile(f);
         for (auto & chunk : file_chunks) {
             PRS1DataChunk* previous = waveform_chunks[chunk->timestamp];
@ -8471,21 +8479,12 @@ void PRS1Import::ImportWaveforms()
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Get the list of pointers sorted by timestamp.
|
// Get the list of pointers sorted by timestamp.
|
||||||
waveforms = waveform_chunks.values();
|
result = waveform_chunks.values();
|
||||||
|
|
||||||
// Coalesce contiguous waveform chunks into larger chunks.
|
// Coalesce contiguous waveform chunks into larger chunks.
|
||||||
waveforms = CoalesceWaveformChunks(waveforms);
|
result = CoalesceWaveformChunks(result);
|
||||||
|
|
||||||
if (session->eventlist.contains(CPAP_FlowRate)) {
|
return result;
|
||||||
if (waveforms.size() > 0) {
|
|
||||||
// Delete anything called "Flow rate" picked up in the events file if real data is present
|
|
||||||
qWarning() << session->session() << "Deleting flow rate events due to flow rate waveform data";
|
|
||||||
session->destroyEvent(CPAP_FlowRate);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Extract raw data into channels.
|
|
||||||
ParseWaveforms();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@@ -295,7 +295,7 @@ public:


     QList<QString> m_wavefiles;
-    QString oxifile;
+    QList<QString> m_oxifiles;

     //! \brief Imports .000 files for bricks.
     bool ImportCompliance();
@@ -306,17 +306,17 @@ public:
     //! \brief Imports the .002 event file(s).
     bool ImportEvents();

-    //! \brief Imports the .005 event file(s).
-    void ImportWaveforms();
+    //! \brief Reads the .005 or .006 waveform file(s).
+    QList<PRS1DataChunk *> ReadWaveformData(QList<QString> & files, const char* label);

     //! \brief Coalesce contiguous .005 or .006 waveform chunks from the file into larger chunks for import.
     QList<PRS1DataChunk *> CoalesceWaveformChunks(QList<PRS1DataChunk *> & allchunks);

     //! \brief Takes the parsed list of Flow/MaskPressure waveform chunks and adds them to the database
-    void ParseWaveforms();
+    void ImportWaveforms();

     //! \brief Takes the parsed list of oximeter waveform chunks and adds them to the database.
-    void ParseOximetry();
+    void ImportOximetry();

     //! \brief Adds a single channel of continuous oximetry data to the database, splitting on any missing samples.
     void ImportOximetryChannel(ChannelID channel, QByteArray & data, quint64 ti, qint64 dur);
@@ -30,7 +30,11 @@ ResMedEDFInfo::~ResMedEDFInfo() { }

 bool ResMedEDFInfo::Parse( ) // overrides and calls the super's Parse
 {
-    EDFInfo::Parse( );
+    if ( ! EDFInfo::Parse( ) ) {
+        qWarning() << "EDFInfo::Parse failed!";
+        sleep(1);
+        return false;
+    }

     // Now massage some stuff into OSCAR's layout
     int snp = edfHdr.recordingident.indexOf("SRN=");
@@ -229,14 +229,15 @@ class STRFile
 {
   public:
     STRFile() :
-        filename(QString()), edf(nullptr) {}
-    STRFile(QString name, ResMedEDFInfo *str) :
-        filename(name), edf(str) {}
+        filename(QString()), days(0), edf(nullptr) {}
+    STRFile(QString name, long int recCnt, ResMedEDFInfo *str) :
+        filename(name), days(recCnt), edf(str) {}
     STRFile(const STRFile & copy) = default;

     virtual ~STRFile() {}

     QString filename;
+    long int days;
     ResMedEDFInfo * edf;
 };
@@ -284,14 +284,17 @@ int ResmedLoader::Open(const QString & dirpath)

     // Check DATALOG folder exists and is readable
     if (!QDir().exists(datalogPath)) {
+        qDebug() << "Missing DATALOG in" << dirpath;
         return -1;
     }

     m_abort = false;
     MachineInfo info = newInfo();

-    if ( ! parseIdentTGT(path, & info, idmap) )
+    if ( ! parseIdentTGT(path, & info, idmap) ) {
+        qDebug() << "Failed to parse Identification.tgt";
         return -1;
+    }

     qDebug() << "Info:" << info.series << info.model << info.modelnumber << info.serial;
 #ifdef IDENT_DEBUG
@@ -303,7 +306,7 @@ int ResmedLoader::Open(const QString & dirpath)

     // Abort if no serial number
     if (info.serial.isEmpty()) {
-        qDebug() << "ResMed Data card has no valid serial number in Indentification.tgt";
+        qDebug() << "ResMed Data card is missing serial number in Indentification.tgt";
         return -1;
     }

@@ -383,9 +386,34 @@ int ResmedLoader::Open(const QString & dirpath)
     if ( ! dir.exists(strBackupPath))
         dir.mkpath(strBackupPath);

+    QString newpath = backup_path + "DATALOG";
+    if ( ! dir.exists(newpath) )
+        dir.mkpath(newpath);
+
     if ( ! importing_backups ) {
         BackupSTRfiles( strpath, path, strBackupPath, info, STRmap );
-    } // end if not importing the backup files
+    } else { // get the STR file that is in the BACKUP folder
+        ResMedEDFInfo * stredf = new ResMedEDFInfo();
+        if ( stredf->Open(strpath) ) {
+            if ( stredf->Parse()) {
+                if (stredf->serialnumber != info.serial) {
+                    qDebug() << "Identification.tgt Serial number doesn't match" << strpath;
+                    delete stredf;
+                } else { // passed the tests, stuff it into the map
+                    QDate date = stredf->edfHdr.startdate_orig.date();
+                    long int days = stredf->GetNumDataRecords();
+                    qDebug() << strpath << "starts at" << date << "for" << days;
+                    STRmap[date] = STRFile(strpath, days, stredf);
+                }
+            } else {
+                qDebug() << "Faulty STR file" << strpath;
+                delete stredf;
+            }
+        } else {
+            qDebug() << "Failed to open" << strpath;
+            delete stredf;
+        }
+    } // end if not importing the backup files
 #ifdef STR_DEBUG
     qDebug() << "STRmap size is " << STRmap.size();
 #endif
@@ -396,6 +424,7 @@ int ResmedLoader::Open(const QString & dirpath)
     dir.setFilter(QDir::Files | QDir::Hidden | QDir::Readable);
     QFileInfoList flist = dir.entryInfoList();
     QDate date;
+    long int days;
 #ifdef STR_DEBUG
     qDebug() << "STR_Backup folder size is " << flist.size();
 #endif
@@ -407,11 +436,13 @@ int ResmedLoader::Open(const QString & dirpath)
             continue;
         if (!(filename.endsWith("edf.gz", Qt::CaseInsensitive) || filename.endsWith("edf", Qt::CaseInsensitive)))
             continue;
-        QString datestr = filename.section("STR-",-1).section(".edf",0,0)+"01";
-        date = QDate().fromString(datestr,"yyyyMMdd");
-        if (STRmap.contains(date))
-            continue;
+        QString datestr = filename.section("STR-",-1).section(".edf",0,0); // +"01";
+        // date = QDate().fromString(datestr,"yyyyMMdd");
+        //
+        // if (STRmap.contains(date)) {
+        // qDebug() << filename << "overlaps anothor STR file";
+        // continue;
+        // }

         ResMedEDFInfo * stredf = new ResMedEDFInfo();
         if ( ! stredf->Open(fi.canonicalFilePath() ) ) {
@@ -433,10 +464,20 @@ int ResmedLoader::Open(const QString & dirpath)

         // Don't trust the filename date, pick the one inside the STR...
         date = stredf->edfHdr.startdate_orig.date();
-        qDebug() << "Resetting STR date from" << date.toString() << "to first of month ... WHY???";
-        date = QDate(date.year(), date.month(), 1);
+        days = stredf->GetNumDataRecords();
+        if (STRmap.contains(date)) { // Keep the longer of the two STR files
+            qDebug() << filename << "overlaps" << STRmap[date].filename << "for" << days;
+            if (days <= STRmap[date].days) {
+                qDebug() << "Skipping" << filename;
+                delete stredf;
+                continue;
+            }
+        }
+        // qDebug() << "Resetting STR date from" << date.toString() << "to first of month ... WHY???";
+        // date = QDate(date.year(), date.month(), 1);

-        STRmap[date] = STRFile(fi.canonicalFilePath(), stredf);
+        qDebug() << fi.canonicalFilePath() << "starts at" << date << "for" << days;
+        STRmap[date] = STRFile(fi.canonicalFilePath(), days, stredf);
     } // end for walking the STR_Backup directory
 #ifdef STR_DEBUG
     qDebug() << "STRmap size is now " << STRmap.size();
@@ -453,9 +494,21 @@ int ResmedLoader::Open(const QString & dirpath)

     // We are done with the Parsed STR EDF objects, so delete them
     for (auto it=STRmap.begin(), end=STRmap.end(); it != end; ++it) {
+        QString fullname = it.value().filename;
 #ifdef STR_DEBUG
-        qDebug() << "Deleting edf of" << it.value().filename;
+        qDebug() << "Deleting edf of" << fullname;
 #endif
+        QString datepart = fullname.section("STR-",-1).section(".edf",0,0);
+        if (datepart.size() == 6 ) { // old style name, change to full date
+            QFile str(fullname);
+            QString newdate = it.key().toString("yyyyMMdd");
+            QString newName = fullname.replace(datepart, newdate);
+            qDebug() << "Renaming" << it.value().filename << "to" << newName;
+            if ( str.rename(newName) )
+                qDebug() << "Success";
+            else
+                qDebug() << "Failed";
+        }
         delete it.value().edf;
     }
 #ifdef STR_DEBUG
@@ -531,6 +584,8 @@ int ResmedLoader::Open(const QString & dirpath)

     sessionCount = 0;
     emit updateMessage(QObject::tr("Importing Sessions..."));

+    // Walk down the resDay list
     runTasks();
     int num_new_sessions = sessionCount;

@@ -683,7 +738,7 @@ int ResmedLoader::ScanFiles(Machine * mach, const QString & datalog_path, QDate
         filename = fi.fileName();

         int len = filename.length();
-        if (len == 4) { // when does this happen?
+        if (len == 4) { // This is a year folder in BackupDATALOG
             filename.toInt(&ok);
             if ( ! ok ) {
                 qDebug() << "Skipping directory - bad 4-letter name" << filename;
@@ -836,12 +891,13 @@ QString ResmedLoader::Backup(const QString & fullname, const QString & backup_pa
     yearstr.toInt(&ok, 10);

     if (!ok) {
-        qDebug() << "Invalid EDF filename given to ResMedLoader::backup()" << fullname;
+        qDebug() << "Invalid EDF filename given to ResMedLoader::Backup()" << fullname;
         return "";
     }

-    QString newpath = backup_path + RMS9_STR_datalog + "/" + yearstr;
-    !dir.exists(newpath) && dir.mkpath(newpath);
+    QString newpath = backup_path + "DATALOG" + "/" + yearstr;
+    if ( ! dir.exists(newpath) )
+        dir.mkpath(newpath);

     newname = newpath+"/"+filename;

@@ -894,10 +950,11 @@ bool ResmedLoader::ProcessSTRfiles(Machine *mach, QMap<QDate, STRFile> & STRmap,
     for (auto it=STRmap.begin(), end=STRmap.end(); it != end; ++it) {
         STRFile & file = it.value();
         ResMedEDFInfo & str = *file.edf;
-        totalRecs += str.GetNumDataRecords();
+        int days = str.GetNumDataRecords();
+        totalRecs += days;
 #ifdef STR_DEBUG
         qDebug() << "STR file is" << file.filename;
-        qDebug() << "First day" << QDateTime::fromMSecsSinceEpoch(str.startdate, EDFInfo::localNoDST).date().toString() << "for" << totalRecs << "days";
+        qDebug() << "First day" << QDateTime::fromMSecsSinceEpoch(str.startdate, EDFInfo::localNoDST).date().toString() << "for" << days << "days";
 #endif
     }

@@ -917,7 +974,7 @@ bool ResmedLoader::ProcessSTRfiles(Machine *mach, QMap<QDate, STRFile> & STRmap,
     QDate lastDay = date.addDays(size-1);

 #ifdef STR_DEBUG
-    qDebug() << "Parsing" << strfile << date.toString() << size << str.GetNumSignals();
+    qDebug() << "Processing" << strfile << date.toString() << size << str.GetNumSignals();
     qDebug() << "Last day is" << lastDay;
 #endif

@@ -1383,6 +1440,9 @@ bool ResmedLoader::ProcessSTRfiles(Machine *mach, QMap<QDate, STRFile> & STRmap,
             qDebug() << "Finished" << date.toString();
 #endif
         }
+#ifdef STR_DEBUG
+        qDebug() << "Finished" << strfile;
+#endif
     }
 #ifdef STR_DEBUG
     qDebug() << "Finished ProcessSTR";
@@ -1462,13 +1522,17 @@ void BackupSTRfiles( const QString strpath, const QString path, const QString st
             continue;
         }
         QDate date = stredf->edfHdr.startdate_orig.date();
-        date = QDate(date.year(), date.month(), 1);
+        long int days = stredf->GetNumDataRecords();
+        // date = QDate(date.year(), date.month(), 1);
         if (STRmap.contains(date)) {
             qDebug() << "STRmap already contains" << date.toString("YYYY-MM-dd");
-            delete stredf;
-            continue;
+            if ( days <= STRmap[date].days ) {
+                qDebug() << "Skipping" << filename;
+                delete stredf;
+                continue;
+            }
         }
-        QString newname = "STR-"+date.toString("yyyyMM")+"."+STR_ext_EDF;
+        QString newname = "STR-"+date.toString("yyyyMMdd")+"."+STR_ext_EDF;

         QString backupfile = strBackupPath+"/"+newname;

@@ -1502,7 +1566,7 @@ void BackupSTRfiles( const QString strpath, const QString path, const QString st
         else
             QFile::exists(gzfile) && QFile::remove(gzfile);

-        STRmap[date] = STRFile(backupfile, stredf);
+        STRmap[date] = STRFile(backupfile, days, stredf);
     } // end for walking the STR files list
 #ifdef STR_DEBUG
     qDebug() << "STRmap has" << STRmap.size() << "entries";
@@ -22,7 +22,7 @@
 //********************************************************************************************
 // Please INCREMENT the following value when making changes to this loaders implementation.
 //
-const int resmed_data_version = 13;
+const int resmed_data_version = 14;
 //
 //********************************************************************************************

@@ -303,11 +303,8 @@ bool ViatomFile::ParseHeader()
     //int spo2_avg = header[17];
     //int spo2_min = header[18];
     //int spo2_3pct = header[19]; // number of events
-    int spo2_4pct = header[20]; // number of events
-    if (header[21] > spo2_4pct) {
-        //CHECK_VALUE(header[21], 0); // sometimes nonzero; maybe spo2_5pct or something like that?
-        UNEXPECTED_VALUE(header[21], "< drops over 4%");
-    }
+    //int spo2_4pct = header[20]; // number of events
+    //CHECK_VALUE(header[21], 0); // ??? sometimes nonzero; maybe pulse spike, not a threshold of SpO2 or pulse, not always smaller than spo2_4pct
     //int time_under_90pct = header[22] | (header[23] << 8); // in seconds
     //int events_under_90pct = header[24]; // number of distinct events
     //float o2_score = header[25] * 0.1;
@@ -566,11 +566,12 @@ void Profile::DataFormatError(Machine *m)
     // Note: I deliberately haven't added a Profile help for this
     if (backups) {
         MachineLoader * loader = lookupLoader(m);
-        /* int c = */
-        mainwin->importCPAP(ImportPath(m->getBackupPath(), loader),
-                            QObject::tr("Rebuilding from %1 Backup").arg(m->brand()));
-        // if ( c > 0 )
-        //     m->info.version = loader->Version();
+        int c = mainwin->importCPAP(ImportPath(m->getBackupPath(), loader),
+                                    QObject::tr("Rebuilding from %1 Backup").arg(m->brand()));
+        if (c >= 0) {
+            // Make sure the updated version gets saved, even if there were no sessions to import.
+            mainwin->finishCPAPImport();
+        }
     } else {
         if (!p_profile->session->backupCardData()) {
             // Automatic backups not available for Intellipap users yet, so don't taunt them..
@@ -194,6 +194,7 @@ class Session
                     t += slice.end - slice.start;
                 }
             }
+            t = t / 3600000.0;
         }
         return t;
     }
@@ -1,5 +1,6 @@
 /* ExportCSV module implementation
  *
+ * Copyright (c) 2020 The OSCAR Team
  * Copyright (c) 2011-2018 Mark Watkins <mark@jedimark.net>
  *
  * This file is subject to the terms and conditions of the GNU General Public
@@ -180,24 +181,16 @@ void ExportCSV::on_exportButton_clicked()
     countlist.append(CPAP_UserFlag2);
     countlist.append(CPAP_PressurePulse);

-    avglist.append(CPAP_Pressure);
-    avglist.append(CPAP_IPAP);
-    avglist.append(CPAP_EPAP);
-    avglist.append(CPAP_FLG); // Pholynyk, 25Aug2015, add ResMed Flow Limitation
-    p90list.append(CPAP_Pressure);
-    p90list.append(CPAP_IPAP);
-    p90list.append(CPAP_EPAP);
-    p90list.append(CPAP_FLG);
+    QVector<ChannelID> statChannels = { CPAP_Pressure, CPAP_PressureSet, CPAP_IPAP, CPAP_IPAPSet, CPAP_EPAP, CPAP_EPAPSet, CPAP_FLG };
+    for (auto & chan : statChannels) {
+        avglist.append(chan);
+        p90list.append(chan);
+        maxlist.append(chan);
+    }

     float percentile=p_profile->general->prefCalcPercentile()/100.0; // Pholynyk, 18Aug2015
     EventDataType percent = percentile; // was 0.90F

-    maxlist.append(CPAP_Pressure); // Pholynyk, 18Aug2015, add maximums
-    maxlist.append(CPAP_IPAP);
-    maxlist.append(CPAP_EPAP);
-    maxlist.append(CPAP_FLG);
-
     // Not sure this section should be translateable.. :-/
     if (ui->rb1_details->isChecked()) {
         header = tr("DateTime") + sep + tr("Session") + sep + tr("Event") + sep + tr("Data/Duration");
@@ -72,7 +72,7 @@ void initializeLogger()
         s_LoggerRunning.lock();   // wait until the thread begins running
         s_LoggerRunning.unlock(); // we no longer need the lock
     }
-    qInstallMessageHandler(MyOutputHandler);
+    qInstallMessageHandler(MyOutputHandler); // NOTE: comment this line out when debugging a crash, otherwise the deferred output will mislead you.
     if (b) {
         qDebug() << "Started logging thread";
     } else {
@@ -196,11 +196,11 @@ bool migrateFromSH(QString destDir) {

         if (!file.exists() || !dirP.exists()) { // It doesn't have a Preferences.xml file or a Profiles directory in it
             // Not a new directory.. nag the user.
-            if (QMessageBox::warning(nullptr, STR_MessageBox_Error,
+            QMessageBox::warning(nullptr, STR_MessageBox_Error,
                 QObject::tr("The folder you chose does not contain valid SleepyHead data.") +
-                "\n\n"+QObject::tr("You cannot use this folder:")+" " + datadir ), QMessageBox::Ok) {
-                continue; // Nope, don't use it, go around the loop again
-            }
+                "\n\n"+QObject::tr("You cannot use this folder:")+" " + datadir,
+                QMessageBox::Ok);
+            continue; // Nope, don't use it, go around the loop again
         }

         qDebug() << "Migration folder is" << datadir;
@@ -743,7 +743,8 @@ void MainWindow::finishCPAPImport()
     GenerateStatistics();
     profileSelector->updateProfileList();

-    welcome->refreshPage();
+    if (welcome)
+        welcome->refreshPage();

     if (overview) { overview->ReloadGraphs(); }
     if (daily) {
@@ -2504,10 +2505,17 @@ void MainWindow::on_actionImport_Viatom_Data_triggered()
 #if defined(Q_OS_WIN)
     // Windows can't handle this name filter.
     w.setOption(QFileDialog::DontUseNativeDialog, true);
+    // And since the non-native dialog can't select both directories and files,
+    // it needs the following to enable selecting multiple files.
+    w.setFileMode(QFileDialog::ExistingFiles);
 #endif

     if (w.exec() == QFileDialog::Accepted) {
         QString filename = w.selectedFiles()[0];
+        if (w.selectedFiles().size() > 1) {
+            // The user selected multiple files in a directory, so use the parent directory as the filename.
+            filename = QFileInfo(filename).absoluteDir().canonicalPath();
+        }

         int c = viatom.Open(filename);
         if (c > 0) {
@@ -164,6 +164,7 @@ class MainWindow : public QMainWindow
     void setStatsHTML(QString html);

     int importCPAP(ImportPath import, const QString &message);
+    void finishCPAPImport();

     void startImportDialog() { on_action_Import_Data_triggered(); }

@@ -365,7 +366,6 @@ class MainWindow : public QMainWindow
   private:
     QString getMainWindowTitle();
     void importCPAPBackups();
-    void finishCPAPImport();
     QList<ImportPath> detectCPAPCards();
     QList<ImportPath> selectCPAPDataCards(const QString & prompt);
     void importCPAPDataCards(const QList<ImportPath> & datacards);
@@ -305,6 +305,7 @@ SOURCES += \
     SleepLib/serialoximeter.cpp \
     SleepLib/loader_plugins/md300w1_loader.cpp \
     Graphs/gSessionTimesChart.cpp \
+    Graphs/gPressureChart.cpp \
     logger.cpp \
     SleepLib/machine_common.cpp \
     SleepLib/loader_plugins/weinmann_loader.cpp \
@@ -383,6 +384,7 @@ HEADERS += \
     SleepLib/serialoximeter.h \
     SleepLib/loader_plugins/md300w1_loader.h \
     Graphs/gSessionTimesChart.h \
+    Graphs/gPressureChart.h \
     logger.h \
     SleepLib/loader_plugins/weinmann_loader.h \
     Graphs/gdailysummary.h \
@@ -1,5 +1,6 @@
 /* Overview GUI Implementation
  *
+ * Copyright (c) 2020 The Oscar Team
 * Copyright (c) 2011-2018 Mark Watkins <mark@jedimark.net>
  *
  * This file is subject to the terms and conditions of the GNU General Public
@@ -23,6 +24,7 @@
 #include "Graphs/gXAxis.h"
 #include "Graphs/gLineChart.h"
 #include "Graphs/gYAxis.h"
+#include "Graphs/gPressureChart.h"
 #include "cprogressbar.h"

 #include "mainwindow.h"